Pull request !2235 (Closed), created on Sat, 13 Oct 2018 19:27:30
  • ini: added new key
  • registration: add a way to disable registration
  • Update register.jinja2
  • alert_channels: it should also be a pkey
  • alert_channels: allow binding to resources

The requested changes are too big and content was truncated.

@@ -0,0 +1,7 @@
1 [bumpversion]
2 current_version = 1.1.0
3 message = release: Bump version {current_version} to {new_version}
4 tag_name = {new_version}
5
6 [bumpversion:file:backend/src/appenlight/VERSION]
7
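For context on what this [bumpversion] section drives, here is a minimal Python sketch of the effect of a hypothetical `bumpversion minor` run (illustrative only, not bumpversion's implementation; the real tool search-and-replaces the old version string inside each listed file rather than rewriting it wholesale):

# Sketch: effect of a hypothetical `bumpversion minor` under the config above.
current_version = "1.1.0"  # from current_version in [bumpversion]

major, minor, patch = (int(x) for x in current_version.split("."))
minor, patch = minor + 1, 0  # the `minor` part is chosen for illustration
new_version = f"{major}.{minor}.{patch}"

# Commit message and tag come from the message/tag_name templates above.
message = "release: Bump version {current_version} to {new_version}".format(
    current_version=current_version, new_version=new_version)
tag_name = new_version  # tag_name = {new_version}

# Simplified: the real tool replaces "1.1.0" in place inside this file,
# the single file listed in the [bumpversion:file:...] section.
with open("backend/src/appenlight/VERSION", "w") as f:
    f.write(new_version)

print(message)   # release: Bump version 1.1.0 to 1.2.0
print(tag_name)  # 1.2.0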
@@ -0,0 +1,161 @@
1 # Created by .ignore support plugin (hsz.mobi)
2 syntax: glob
3
4 ### Example user template template
5 ### Example user template
6
7 .venv
8 # IntelliJ project files
9 .idea
10 *.iml
11 out
12 gen### Python template
13 # Byte-compiled / optimized / DLL files
14 __pycache__/
15 *.py[cod]
16 *$py.class
17
18 # C extensions
19 *.so
20
21 # Distribution / packaging
22 .Python
23 env/
24 build/
25 develop-eggs/
26 dist/
27 downloads/
28 eggs/
29 .eggs/
30 lib/
31 lib64/
32 parts/
33 sdist/
34 var/
35 *.egg-info/
36 .installed.cfg
37 *.egg
38
39 # PyInstaller
40 # Usually these files are written by a python script from a template
41 # before PyInstaller builds the exe, so as to inject date/other infos into it.
42 *.manifest
43 *.spec
44
45 # Installer logs
46 pip-log.txt
47 pip-delete-this-directory.txt
48
49 # Unit test / coverage reports
50 htmlcov/
51 .tox/
52 .coverage
53 .coverage.*
54 .cache
55 nosetests.xml
56 coverage.xml
57 *,cover
58 .hypothesis/
59
60 # Translations
61 *.mo
62 *.pot
63
64 # Mac stuff:
65 *.DS_Store
66
67 # Django stuff:
68 *.log
69 local_settings.py
70
71 # Flask instance folder
72 instance/
73
74 # Scrapy stuff:
75 .scrapy
76
77 # Sphinx documentation
78 docs/_build/
79
80 # PyBuilder
81 target/
82
83 # IPython Notebook
84 .ipynb_checkpoints
85
86 # pyenv
87 .python-version
88
89 # celery beat schedule file
90 celerybeat-schedule
91
92 # dotenv
93 .env
94
95 # virtualenv
96 venv/
97 ENV/
98
99 # Spyder project settings
100 .spyderproject
101
102 # Rope project settings
103 .ropeproject
104
105
106 syntax: regexp
107 ^\.idea$
108 syntax: regexp
109 ^\.settings$
110 syntax: regexp
111 ^data$
112 syntax: regexp
113 ^webassets$
114 syntax: regexp
115 ^dist$
116 syntax: regexp
117 ^\.project$
118 syntax: regexp
119 ^\.pydevproject$
120 syntax: regexp
121 ^private$
122 syntax: regexp
123 ^appenlight_frontend/build$
124 syntax: regexp
125 ^appenlight_frontend/bower_components$
126 syntax: regexp
127 ^appenlight_frontend/node_modules$
128 ^src/node_modules$
129 syntax: regexp
130 ^\.pydevproject$
131 syntax: regexp
132 appenlight\.egg-info$
133 syntax: regexp
134 \.pyc$
135 syntax: regexp
136 \celerybeat.*
137 syntax: regexp
138 \.iml$
139 syntax: regexp
140 ^frontend/build$
141 syntax: regexp
142 ^frontend/bower_components$
143 syntax: regexp
144 ^frontend/node_modules$
145 ^frontend/src/node_modules$
146 ^frontend/build$
147
148 syntax: regexp
149 \.db$
150
151 syntax: regexp
152 packer_cache
153
154 syntax: regexp
155 packer/packer
156
157 syntax: regexp
158 install_appenlight_production.yaml
159 ^docs/result$
160 ^docs/Appenlight-docs/_build$
161 ^docs/www$
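The `syntax: glob` and `syntax: regexp` markers in the file above switch Mercurial's pattern dialect for all lines that follow them. A rough Python illustration of the difference, using fnmatch and re as stand-ins (an approximation of hg's matching behavior, not its actual code):

import fnmatch
import re

# Approximation: hg matches patterns against repository-relative paths.
# `syntax: glob` lines behave like shell globs; `syntax: regexp` lines are
# regular expressions searched within the path (anchored with ^ and $ above).
print(fnmatch.fnmatch("appenlight.iml", "*.iml"))  # True: glob suffix match
print(bool(re.search(r"^frontend/node_modules$",
                     "frontend/node_modules")))    # True: exact anchored match
print(bool(re.search(r"^frontend/node_modules$",
                     "frontend/node_modules/a.js")))  # False: $ stops the match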
@@ -0,0 +1,16 @@
1 .. :changelog:
2
3 History
4 -------
5
6 0.9.1 (2016-XX-XX)
7 ++++++++++++++++++
8
9 * Added support for "NOT" operator in rule engine
10 * Various bugfixes
11
12
13 0.9.0 (2016-06-29)
14 ++++++++++++++++++
15
16 * first tagged public release
@@ -0,0 +1,3 @@
1 [pip2nix]
2 requirements = ., -r ./requirements.txt
3 output = ./python-packages.nix
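Running pip2nix against this configuration (a minimal sketch, assuming its `generate` subcommand) resolves the requirements and writes the `python-packages.nix` file shown below. A toy Python emitter for the stanza shape it produces, using the "six" entry's data copied from that generated file (illustrative only, not pip2nix's code):

# Toy emitter for the stanza shape seen in python-packages.nix below;
# not pip2nix's implementation. Sample data comes from the "six" entry.
STANZA = """\
  {name} = super.buildPythonPackage {{
    name = "{name}-{version}";
    doCheck = false;
    propagatedBuildInputs = with self; [{deps}];
    src = fetchurl {{
      url = "{url}";
      md5 = "{md5}";
    }};
  }};"""

print(STANZA.format(
    name="six",
    version="1.9.0",
    deps="",
    url="https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz",
    md5="476881ef4012262dfc8adc645ee786c4",
))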
This diff has been collapsed as it changes many lines (1082 lines changed).
@@ -0,0 +1,1082 @@
1 {
2 Jinja2 = super.buildPythonPackage {
3 name = "Jinja2-2.8";
4 buildInputs = with self; [];
5 doCheck = false;
6 propagatedBuildInputs = with self; [MarkupSafe];
7 src = fetchurl {
8 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
9 md5 = "edb51693fe22c53cee5403775c71a99e";
10 };
11 meta = {
12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 };
14 };
15 Mako = super.buildPythonPackage {
16 name = "Mako-1.0.4";
17 buildInputs = with self; [];
18 doCheck = false;
19 propagatedBuildInputs = with self; [MarkupSafe];
20 src = fetchurl {
21 url = "https://pypi.python.org/packages/7a/ae/925434246ee90b42e8ef57d3b30a0ab7caf9a2de3e449b876c56dcb48155/Mako-1.0.4.tar.gz";
22 md5 = "c5fc31a323dd4990683d2f2da02d4e20";
23 };
24 meta = {
25 license = [ pkgs.lib.licenses.mit ];
26 };
27 };
28 MarkupSafe = super.buildPythonPackage {
29 name = "MarkupSafe-0.23";
30 buildInputs = with self; [];
31 doCheck = false;
32 propagatedBuildInputs = with self; [];
33 src = fetchurl {
34 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
35 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
36 };
37 meta = {
38 license = [ pkgs.lib.licenses.bsdOriginal ];
39 };
40 };
41 PasteDeploy = super.buildPythonPackage {
42 name = "PasteDeploy-1.5.2";
43 buildInputs = with self; [];
44 doCheck = false;
45 propagatedBuildInputs = with self; [];
46 src = fetchurl {
47 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
48 md5 = "352b7205c78c8de4987578d19431af3b";
49 };
50 meta = {
51 license = [ pkgs.lib.licenses.mit ];
52 };
53 };
54 SQLAlchemy = super.buildPythonPackage {
55 name = "SQLAlchemy-1.0.12";
56 buildInputs = with self; [];
57 doCheck = false;
58 propagatedBuildInputs = with self; [];
59 src = fetchurl {
60 url = "https://pypi.python.org/packages/5c/52/9b48cd58eac58cae2a27923ff34c783f390b95413ff65669a86e98f80829/SQLAlchemy-1.0.12.tar.gz";
61 md5 = "6d19ef29883bbebdcac6613cf391cac4";
62 };
63 meta = {
64 license = [ pkgs.lib.licenses.mit ];
65 };
66 };
67 WebOb = super.buildPythonPackage {
68 name = "WebOb-1.6.1";
69 buildInputs = with self; [];
70 doCheck = false;
71 propagatedBuildInputs = with self; [];
72 src = fetchurl {
73 url = "https://pypi.python.org/packages/5d/c7/7c1565b188d8d32bf3657a24b9d71621e35ba20ec4179a0a7f9803511099/WebOb-1.6.1.tar.gz";
74 md5 = "04049d82e9d12dd91f6f46f54cc826aa";
75 };
76 meta = {
77 license = [ pkgs.lib.licenses.mit ];
78 };
79 };
80 alembic = super.buildPythonPackage {
81 name = "alembic-0.8.6";
82 buildInputs = with self; [];
83 doCheck = false;
84 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
85 src = fetchurl {
86 url = "https://pypi.python.org/packages/d2/c3/fdb752aa39832d056aeac958f35f1fb9fb9397a52bdab9248adcbd9f17d9/alembic-0.8.6.tar.gz";
87 md5 = "6517b160e576cedf9b7625a18a9bc594";
88 };
89 meta = {
90 license = [ pkgs.lib.licenses.mit ];
91 };
92 };
93 amqp = super.buildPythonPackage {
94 name = "amqp-1.4.9";
95 buildInputs = with self; [];
96 doCheck = false;
97 propagatedBuildInputs = with self; [];
98 src = fetchurl {
99 url = "https://pypi.python.org/packages/cc/a4/f265c6f9a7eb1dd45d36d9ab775520e07ff575b11ad21156f9866da047b2/amqp-1.4.9.tar.gz";
100 md5 = "df57dde763ba2dea25b3fa92dfe43c19";
101 };
102 meta = {
103 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
104 };
105 };
106 anyjson = super.buildPythonPackage {
107 name = "anyjson-0.3.3";
108 buildInputs = with self; [];
109 doCheck = false;
110 propagatedBuildInputs = with self; [];
111 src = fetchurl {
112 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
113 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
114 };
115 meta = {
116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 };
118 };
119 appenlight = super.buildPythonPackage {
120 name = "appenlight-0.9.0";
121 buildInputs = with self; [];
122 doCheck = false;
123 propagatedBuildInputs = with self; [repoze.sendmail pyramid pyramid-tm pyramid-debugtoolbar pyramid-authstack SQLAlchemy alembic webhelpers2 transaction zope.sqlalchemy pyramid-mailer redis redlock-py pyramid-jinja2 psycopg2 wtforms celery formencode psutil ziggurat-foundations bcrypt appenlight-client markdown colander defusedxml dogpile.cache pyramid-redis-sessions simplejson waitress gunicorn requests requests-oauthlib gevent gevent-websocket pygments lxml paginate paginate-sqlalchemy pyelasticsearch six mock itsdangerous camplight jira python-dateutil authomatic cryptography webassets];
124 src = ./.;
125 meta = {
126 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
127 };
128 };
129 appenlight-client = super.buildPythonPackage {
130 name = "appenlight-client-0.6.17";
131 buildInputs = with self; [];
132 doCheck = false;
133 propagatedBuildInputs = with self; [WebOb requests six];
134 src = fetchurl {
135 url = "https://pypi.python.org/packages/af/86/1075f162d6534080f7f6ed9d8a83254e8f0be90c0a3e7ead9feffbe4423f/appenlight_client-0.6.17.tar.gz";
136 md5 = "2f4d8229ce2dba607a9077210857e0e5";
137 };
138 meta = {
139 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
140 };
141 };
142 authomatic = super.buildPythonPackage {
143 name = "authomatic-0.1.0.post1";
144 buildInputs = with self; [];
145 doCheck = false;
146 propagatedBuildInputs = with self; [];
147 src = fetchurl {
148 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
149 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
150 };
151 meta = {
152 license = [ pkgs.lib.licenses.mit ];
153 };
154 };
155 bcrypt = super.buildPythonPackage {
156 name = "bcrypt-2.0.0";
157 buildInputs = with self; [];
158 doCheck = false;
159 propagatedBuildInputs = with self; [cffi six];
160 src = fetchurl {
161 url = "https://pypi.python.org/packages/11/7d/4c7980d04314466de42ea804db71995c9b3a2a47dc79a63c51f1be0cfd50/bcrypt-2.0.0.tar.gz";
162 md5 = "e7fb17be46904cdb2ae6a062859ee58c";
163 };
164 meta = {
165 license = [ pkgs.lib.licenses.asl20 ];
166 };
167 };
168 billiard = super.buildPythonPackage {
169 name = "billiard-3.3.0.23";
170 buildInputs = with self; [];
171 doCheck = false;
172 propagatedBuildInputs = with self; [];
173 src = fetchurl {
174 url = "https://pypi.python.org/packages/64/a6/d7b6fb7bd0a4680a41f1d4b27061c7b768c673070ba8ac116f865de4e7ca/billiard-3.3.0.23.tar.gz";
175 md5 = "6ee416e1e7c8d8164ce29d7377cca6a4";
176 };
177 meta = {
178 license = [ pkgs.lib.licenses.bsdOriginal ];
179 };
180 };
181 camplight = super.buildPythonPackage {
182 name = "camplight-0.9.6";
183 buildInputs = with self; [];
184 doCheck = false;
185 propagatedBuildInputs = with self; [requests];
186 src = fetchurl {
187 url = "https://pypi.python.org/packages/60/df/bed89a1f1d06632b192eff09a8fa75f85e0080ff70229c8145fbc3b2afa8/camplight-0.9.6.tar.gz";
188 md5 = "716cc7a4ea30da34ae4fcbfe2784ce59";
189 };
190 meta = {
191 license = [ pkgs.lib.licenses.mit ];
192 };
193 };
194 celery = super.buildPythonPackage {
195 name = "celery-3.1.23";
196 buildInputs = with self; [];
197 doCheck = false;
198 propagatedBuildInputs = with self; [pytz billiard kombu];
199 src = fetchurl {
200 url = "https://pypi.python.org/packages/ea/a6/6da0bac3ea8abbc2763fd2664af2955702f97f140f2d7277069445532b7c/celery-3.1.23.tar.gz";
201 md5 = "c6f10f956a49424d553ab1391ab39ab2";
202 };
203 meta = {
204 license = [ pkgs.lib.licenses.bsdOriginal ];
205 };
206 };
207 certifi = super.buildPythonPackage {
208 name = "certifi-2016.8.31";
209 buildInputs = with self; [];
210 doCheck = false;
211 propagatedBuildInputs = with self; [];
212 src = fetchurl {
213 url = "https://pypi.python.org/packages/1c/d1/0133a5084f0d17db0270c6061e824a11b0e417d743f5ff4c594f4090ed89/certifi-2016.8.31.tar.gz";
214 md5 = "2f22d484a36d38d98be74f9eeb2846ec";
215 };
216 meta = {
217 license = [ pkgs.lib.licenses.isc ];
218 };
219 };
220 cffi = super.buildPythonPackage {
221 name = "cffi-1.8.2";
222 buildInputs = with self; [];
223 doCheck = false;
224 propagatedBuildInputs = with self; [pycparser];
225 src = fetchurl {
226 url = "https://pypi.python.org/packages/b8/21/9d6f08d2d36a0a8c84623646b4ed5a07023d868823361a086b021fb21172/cffi-1.8.2.tar.gz";
227 md5 = "538f307b6c5169bba41fbfda2b070762";
228 };
229 meta = {
230 license = [ pkgs.lib.licenses.mit ];
231 };
232 };
233 colander = super.buildPythonPackage {
234 name = "colander-1.2";
235 buildInputs = with self; [];
236 doCheck = false;
237 propagatedBuildInputs = with self; [translationstring iso8601];
238 src = fetchurl {
239 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
240 md5 = "83db21b07936a0726e588dae1914b9ed";
241 };
242 meta = {
243 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
244 };
245 };
246 cryptography = super.buildPythonPackage {
247 name = "cryptography-1.2.3";
248 buildInputs = with self; [];
249 doCheck = false;
250 propagatedBuildInputs = with self; [idna pyasn1 six setuptools enum34 ipaddress cffi];
251 src = fetchurl {
252 url = "https://pypi.python.org/packages/8b/7d/9df253f059c8d9a9389f06df5d6301b0725a44dbf055a1f7aff8e455746a/cryptography-1.2.3.tar.gz";
253 md5 = "5474d2b3e8c7555a60852e48d2743f85";
254 };
255 meta = {
256 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
257 };
258 };
259 defusedxml = super.buildPythonPackage {
260 name = "defusedxml-0.4.1";
261 buildInputs = with self; [];
262 doCheck = false;
263 propagatedBuildInputs = with self; [];
264 src = fetchurl {
265 url = "https://pypi.python.org/packages/09/3b/b1afa9649f48517d027e99413fec54f387f648c90156b3cf6451c8cd45f9/defusedxml-0.4.1.tar.gz";
266 md5 = "230a5eff64f878b392478e30376d673a";
267 };
268 meta = {
269 license = [ pkgs.lib.licenses.psfl ];
270 };
271 };
272 dogpile.cache = super.buildPythonPackage {
273 name = "dogpile.cache-0.5.7";
274 buildInputs = with self; [];
275 doCheck = false;
276 propagatedBuildInputs = with self; [dogpile.core];
277 src = fetchurl {
278 url = "https://pypi.python.org/packages/07/74/2a83bedf758156d9c95d112691bbad870d3b77ccbcfb781b4ef836ea7d96/dogpile.cache-0.5.7.tar.gz";
279 md5 = "3e58ce41af574aab41d78e9c4190f194";
280 };
281 meta = {
282 license = [ pkgs.lib.licenses.bsdOriginal ];
283 };
284 };
285 dogpile.core = super.buildPythonPackage {
286 name = "dogpile.core-0.4.1";
287 buildInputs = with self; [];
288 doCheck = false;
289 propagatedBuildInputs = with self; [];
290 src = fetchurl {
291 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
292 md5 = "01cb19f52bba3e95c9b560f39341f045";
293 };
294 meta = {
295 license = [ pkgs.lib.licenses.bsdOriginal ];
296 };
297 };
298 elasticsearch = super.buildPythonPackage {
299 name = "elasticsearch-1.9.0";
300 buildInputs = with self; [];
301 doCheck = false;
302 propagatedBuildInputs = with self; [urllib3];
303 src = fetchurl {
304 url = "https://pypi.python.org/packages/13/9b/540e311b31a10c2a904acfb08030c656047e5c7ba479d35df2799e5dccfe/elasticsearch-1.9.0.tar.gz";
305 md5 = "3550390baea1639479f79758d66ab032";
306 };
307 meta = {
308 license = [ pkgs.lib.licenses.asl20 ];
309 };
310 };
311 enum34 = super.buildPythonPackage {
312 name = "enum34-1.1.6";
313 buildInputs = with self; [];
314 doCheck = false;
315 propagatedBuildInputs = with self; [];
316 src = fetchurl {
317 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
318 md5 = "5f13a0841a61f7fc295c514490d120d0";
319 };
320 meta = {
321 license = [ pkgs.lib.licenses.bsdOriginal ];
322 };
323 };
324 formencode = super.buildPythonPackage {
325 name = "formencode-1.3.0";
326 buildInputs = with self; [];
327 doCheck = false;
328 propagatedBuildInputs = with self; [];
329 src = fetchurl {
330 url = "https://pypi.python.org/packages/99/5b/f71f36b81b42291a70f61104d0eeb1a30be856a052ebe032c37b45db840c/FormEncode-1.3.0.zip";
331 md5 = "6df12d60bf3179402f2c2efd1129eb74";
332 };
333 meta = {
334 license = [ pkgs.lib.licenses.psfl ];
335 };
336 };
337 gevent = super.buildPythonPackage {
338 name = "gevent-1.1.1";
339 buildInputs = with self; [];
340 doCheck = false;
341 propagatedBuildInputs = with self; [greenlet];
342 src = fetchurl {
343 url = "https://pypi.python.org/packages/12/dc/0b2e57823225de86f6e111a65d212c9e3b64847dddaa19691a6cb94b0b2e/gevent-1.1.1.tar.gz";
344 md5 = "1532f5396ab4d07a231f1935483be7c3";
345 };
346 meta = {
347 license = [ pkgs.lib.licenses.mit ];
348 };
349 };
350 gevent-websocket = super.buildPythonPackage {
351 name = "gevent-websocket-0.9.5";
352 buildInputs = with self; [];
353 doCheck = false;
354 propagatedBuildInputs = with self; [gevent];
355 src = fetchurl {
356 url = "https://pypi.python.org/packages/de/93/6bc86ddd65435a56a2f2ea7cc908d92fea894fc08e364156656e71cc1435/gevent-websocket-0.9.5.tar.gz";
357 md5 = "03a8473b9a61426b0ef6094319141389";
358 };
359 meta = {
360 license = [ { fullName = "Copyright 2011-2013 Jeffrey Gelens <jeffrey@noppo.pro>"; } pkgs.lib.licenses.asl20 ];
361 };
362 };
363 greenlet = super.buildPythonPackage {
364 name = "greenlet-0.4.10";
365 buildInputs = with self; [];
366 doCheck = false;
367 propagatedBuildInputs = with self; [];
368 src = fetchurl {
369 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
370 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
371 };
372 meta = {
373 license = [ pkgs.lib.licenses.mit ];
374 };
375 };
376 gunicorn = super.buildPythonPackage {
377 name = "gunicorn-19.4.5";
378 buildInputs = with self; [];
379 doCheck = false;
380 propagatedBuildInputs = with self; [];
381 src = fetchurl {
382 url = "https://pypi.python.org/packages/1e/67/95248e17050822ab436c8a43dbfc0625a8545775737e33b66508cffad278/gunicorn-19.4.5.tar.gz";
383 md5 = "ce45c2dccba58784694dd77f23d9a677";
384 };
385 meta = {
386 license = [ pkgs.lib.licenses.mit ];
387 };
388 };
389 idna = super.buildPythonPackage {
390 name = "idna-2.1";
391 buildInputs = with self; [];
392 doCheck = false;
393 propagatedBuildInputs = with self; [];
394 src = fetchurl {
395 url = "https://pypi.python.org/packages/fb/84/8c27516fbaa8147acd2e431086b473c453c428e24e8fb99a1d89ce381851/idna-2.1.tar.gz";
396 md5 = "f6473caa9c5e0cc1ad3fd5d04c3c114b";
397 };
398 meta = {
399 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
400 };
401 };
402 ipaddress = super.buildPythonPackage {
403 name = "ipaddress-1.0.17";
404 buildInputs = with self; [];
405 doCheck = false;
406 propagatedBuildInputs = with self; [];
407 src = fetchurl {
408 url = "https://pypi.python.org/packages/bb/26/3b64955ff73f9e3155079b9ed31812afdfa5333b5c76387454d651ef593a/ipaddress-1.0.17.tar.gz";
409 md5 = "8bbf0326719fafb1f453921ef96729fe";
410 };
411 meta = {
412 license = [ pkgs.lib.licenses.psfl ];
413 };
414 };
415 iso8601 = super.buildPythonPackage {
416 name = "iso8601-0.1.11";
417 buildInputs = with self; [];
418 doCheck = false;
419 propagatedBuildInputs = with self; [];
420 src = fetchurl {
421 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
422 md5 = "b06d11cd14a64096f907086044f0fe38";
423 };
424 meta = {
425 license = [ pkgs.lib.licenses.mit ];
426 };
427 };
428 itsdangerous = super.buildPythonPackage {
429 name = "itsdangerous-0.24";
430 buildInputs = with self; [];
431 doCheck = false;
432 propagatedBuildInputs = with self; [];
433 src = fetchurl {
434 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
435 md5 = "a3d55aa79369aef5345c036a8a26307f";
436 };
437 meta = {
438 license = [ pkgs.lib.licenses.bsdOriginal ];
439 };
440 };
441 jira = super.buildPythonPackage {
442 name = "jira-1.0.7";
443 buildInputs = with self; [];
444 doCheck = false;
445 propagatedBuildInputs = with self; [requests requests-oauthlib tlslite six requests-toolbelt];
446 src = fetchurl {
447 url = "https://pypi.python.org/packages/4e/36/4f0ab121c3510fce29743c31e2f47e99c2be68ee4441ad395366489351b0/jira-1.0.7.tar.gz";
448 md5 = "cb1d3f1e1b7a388932ad5d961bf2c56d";
449 };
450 meta = {
451 license = [ pkgs.lib.licenses.bsdOriginal ];
452 };
453 };
454 kombu = super.buildPythonPackage {
455 name = "kombu-3.0.35";
456 buildInputs = with self; [];
457 doCheck = false;
458 propagatedBuildInputs = with self; [anyjson amqp];
459 src = fetchurl {
460 url = "https://pypi.python.org/packages/5f/4f/3859b52f6d465d0d4a767516c924ee4f0e1387498ac8d0c30d9942da3762/kombu-3.0.35.tar.gz";
461 md5 = "6483ac8ba7109ec606f5cb9bd084b6ef";
462 };
463 meta = {
464 license = [ pkgs.lib.licenses.bsdOriginal ];
465 };
466 };
467 lxml = super.buildPythonPackage {
468 name = "lxml-3.6.0";
469 buildInputs = with self; [];
470 doCheck = false;
471 propagatedBuildInputs = with self; [];
472 src = fetchurl {
473 url = "https://pypi.python.org/packages/11/1b/fe6904151b37a0d6da6e60c13583945f8ce3eae8ebd0ec763ce546358947/lxml-3.6.0.tar.gz";
474 md5 = "5957cc384bd6e83934be35c057ec03b6";
475 };
476 meta = {
477 license = [ pkgs.lib.licenses.bsdOriginal ];
478 };
479 };
480 markdown = super.buildPythonPackage {
481 name = "markdown-2.5";
482 buildInputs = with self; [];
483 doCheck = false;
484 propagatedBuildInputs = with self; [];
485 src = fetchurl {
486 url = "https://pypi.python.org/packages/16/7f/034572fbc66f76a626156c9500349f5b384ca1f38194318ddde32bc2fcb0/Markdown-2.5.zip";
487 md5 = "053e5614f7efc06ac0fcd6954678096c";
488 };
489 meta = {
490 license = [ pkgs.lib.licenses.bsdOriginal ];
491 };
492 };
493 mock = super.buildPythonPackage {
494 name = "mock-1.0.1";
495 buildInputs = with self; [];
496 doCheck = false;
497 propagatedBuildInputs = with self; [];
498 src = fetchurl {
499 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
500 md5 = "869f08d003c289a97c1a6610faf5e913";
501 };
502 meta = {
503 license = [ pkgs.lib.licenses.bsdOriginal ];
504 };
505 };
506 oauthlib = super.buildPythonPackage {
507 name = "oauthlib-2.0.0";
508 buildInputs = with self; [];
509 doCheck = false;
510 propagatedBuildInputs = with self; [];
511 src = fetchurl {
512 url = "https://pypi.python.org/packages/ce/92/7f07412a4f04e55c1e83a09c6fd48075b5df96c1dbd4078c3407c5be1dff/oauthlib-2.0.0.tar.gz";
513 md5 = "79b83aa677fc45d1ea28deab7445b4ca";
514 };
515 meta = {
516 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved"; } ];
517 };
518 };
519 paginate = super.buildPythonPackage {
520 name = "paginate-0.5.4";
521 buildInputs = with self; [];
522 doCheck = false;
523 propagatedBuildInputs = with self; [];
524 src = fetchurl {
525 url = "https://pypi.python.org/packages/52/2e/2c3a5647d3f7583355743d73841d03c8b50b97983a478a8f82d3cb9f4a5f/paginate-0.5.4.tar.gz";
526 md5 = "91fdb133f85ac73c6616feba38976e95";
527 };
528 meta = {
529 license = [ pkgs.lib.licenses.mit ];
530 };
531 };
532 paginate-sqlalchemy = super.buildPythonPackage {
533 name = "paginate-sqlalchemy-0.2.0";
534 buildInputs = with self; [];
535 doCheck = false;
536 propagatedBuildInputs = with self; [SQLAlchemy paginate];
537 src = fetchurl {
538 url = "https://pypi.python.org/packages/25/64/fe572514615971fc235e95798ae0e2ee3beeccf43272c623a0a6b082d2d6/paginate_sqlalchemy-0.2.0.tar.gz";
539 md5 = "4ca097c4132f43cd72c6a1795b6bbb5d";
540 };
541 meta = {
542 license = [ pkgs.lib.licenses.mit ];
543 };
544 };
545 passlib = super.buildPythonPackage {
546 name = "passlib-1.6.5";
547 buildInputs = with self; [];
548 doCheck = false;
549 propagatedBuildInputs = with self; [];
550 src = fetchurl {
551 url = "https://pypi.python.org/packages/1e/59/d1a50836b29c87a1bde9442e1846aa11e1548491cbee719e51b45a623e75/passlib-1.6.5.tar.gz";
552 md5 = "d2edd6c42cde136a538b48d90a06ad67";
553 };
554 meta = {
555 license = [ pkgs.lib.licenses.bsdOriginal ];
556 };
557 };
558 psutil = super.buildPythonPackage {
559 name = "psutil-2.1.2";
560 buildInputs = with self; [];
561 doCheck = false;
562 propagatedBuildInputs = with self; [];
563 src = fetchurl {
564 url = "https://pypi.python.org/packages/53/6a/8051b913b2f94eb00fd045fe9e14a7182b6e7f088b12c308edd7616a559b/psutil-2.1.2.tar.gz";
565 md5 = "1969c9b3e256f5ce8fb90c5d0124233e";
566 };
567 meta = {
568 license = [ pkgs.lib.licenses.bsdOriginal ];
569 };
570 };
571 psycopg2 = super.buildPythonPackage {
572 name = "psycopg2-2.6.1";
573 buildInputs = with self; [];
574 doCheck = false;
575 propagatedBuildInputs = with self; [];
576 src = fetchurl {
577 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
578 md5 = "842b44f8c95517ed5b792081a2370da1";
579 };
580 meta = {
581 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
582 };
583 };
584 pyasn1 = super.buildPythonPackage {
585 name = "pyasn1-0.1.9";
586 buildInputs = with self; [];
587 doCheck = false;
588 propagatedBuildInputs = with self; [];
589 src = fetchurl {
590 url = "https://pypi.python.org/packages/f7/83/377e3dd2e95f9020dbd0dfd3c47aaa7deebe3c68d3857a4e51917146ae8b/pyasn1-0.1.9.tar.gz";
591 md5 = "f00a02a631d4016818659d1cc38d229a";
592 };
593 meta = {
594 license = [ pkgs.lib.licenses.bsdOriginal ];
595 };
596 };
597 pycparser = super.buildPythonPackage {
598 name = "pycparser-2.14";
599 buildInputs = with self; [];
600 doCheck = false;
601 propagatedBuildInputs = with self; [];
602 src = fetchurl {
603 url = "https://pypi.python.org/packages/6d/31/666614af3db0acf377876d48688c5d334b6e493b96d21aa7d332169bee50/pycparser-2.14.tar.gz";
604 md5 = "a2bc8d28c923b4fe2b2c3b4b51a4f935";
605 };
606 meta = {
607 license = [ pkgs.lib.licenses.bsdOriginal ];
608 };
609 };
610 pyelasticsearch = super.buildPythonPackage {
611 name = "pyelasticsearch-1.4";
612 buildInputs = with self; [];
613 doCheck = false;
614 propagatedBuildInputs = with self; [certifi elasticsearch urllib3 simplejson six];
615 src = fetchurl {
616 url = "https://pypi.python.org/packages/2f/3a/7643cfcfc4cbdbb20ada800bbd54ac9705d0c047d7b8f8d5eeeb3047b4eb/pyelasticsearch-1.4.tar.gz";
617 md5 = "ed61ebb7b253364e55b4923d11e17049";
618 };
619 meta = {
620 license = [ pkgs.lib.licenses.bsdOriginal ];
621 };
622 };
623 pygments = super.buildPythonPackage {
624 name = "pygments-2.1.3";
625 buildInputs = with self; [];
626 doCheck = false;
627 propagatedBuildInputs = with self; [];
628 src = fetchurl {
629 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
630 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
631 };
632 meta = {
633 license = [ pkgs.lib.licenses.bsdOriginal ];
634 };
635 };
636 pyramid = super.buildPythonPackage {
637 name = "pyramid-1.7.3";
638 buildInputs = with self; [];
639 doCheck = false;
640 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
641 src = fetchurl {
642 url = "https://pypi.python.org/packages/9c/6d/9b9f9acf22c5d221f25cf6756645bce9ea54ee741466197674fe77f2eee3/pyramid-1.7.3.tar.gz";
643 md5 = "5f154c8c352ef013e6e412be02bbb576";
644 };
645 meta = {
646 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
647 };
648 };
649 pyramid-authstack = super.buildPythonPackage {
650 name = "pyramid-authstack-1.0.1";
651 buildInputs = with self; [];
652 doCheck = false;
653 propagatedBuildInputs = with self; [pyramid zope.interface];
654 src = fetchurl {
655 url = "https://pypi.python.org/packages/01/4b/e84cb8fda19f0f03f96231195fd074212b9291f732aa07f90edcfb21ff34/pyramid_authstack-1.0.1.tar.gz";
656 md5 = "8e199862b5a5cd6385f7d5209cee2f12";
657 };
658 meta = {
659 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
660 };
661 };
662 pyramid-debugtoolbar = super.buildPythonPackage {
663 name = "pyramid-debugtoolbar-3.0.4";
664 buildInputs = with self; [];
665 doCheck = false;
666 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru pygments];
667 src = fetchurl {
668 url = "https://pypi.python.org/packages/b0/c5/aae5d99983600146875d471aab9142b925fd3596e6e637f6c35d158d09cc/pyramid_debugtoolbar-3.0.4.tar.gz";
669 md5 = "51ff68a733ae994641027f10116e519d";
670 };
671 meta = {
672 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
673 };
674 };
675 pyramid-jinja2 = super.buildPythonPackage {
676 name = "pyramid-jinja2-2.6.2";
677 buildInputs = with self; [];
678 doCheck = false;
679 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
680 src = fetchurl {
681 url = "https://pypi.python.org/packages/37/00/ac38702305dcf08fe1f1d6d882e8e2d957543bc96c62de52d99d43433c23/pyramid_jinja2-2.6.2.tar.gz";
682 md5 = "10ca075934ebf8f52acfc9898991966d";
683 };
684 meta = {
685 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
686 };
687 };
688 pyramid-mailer = super.buildPythonPackage {
689 name = "pyramid-mailer-0.14.1";
690 buildInputs = with self; [];
691 doCheck = false;
692 propagatedBuildInputs = with self; [pyramid repoze.sendmail];
693 src = fetchurl {
694 url = "https://pypi.python.org/packages/43/02/a32823750dbdee4280090843d5788cc550ab6f24f23fcabbeb7f912bf5fe/pyramid_mailer-0.14.1.tar.gz";
695 md5 = "a589801afdc4a3d64337e4cbd2fc7cdb";
696 };
697 meta = {
698 license = [ pkgs.lib.licenses.bsdOriginal ];
699 };
700 };
701 pyramid-mako = super.buildPythonPackage {
702 name = "pyramid-mako-1.0.2";
703 buildInputs = with self; [];
704 doCheck = false;
705 propagatedBuildInputs = with self; [pyramid Mako];
706 src = fetchurl {
707 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
708 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
709 };
710 meta = {
711 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
712 };
713 };
714 pyramid-redis-sessions = super.buildPythonPackage {
715 name = "pyramid-redis-sessions-1.0.1";
716 buildInputs = with self; [];
717 doCheck = false;
718 propagatedBuildInputs = with self; [redis pyramid];
719 src = fetchurl {
720 url = "https://pypi.python.org/packages/45/9b/905fd70bb603b61819d525efe7626342ad5f8d033e25fbaedbc53f458c37/pyramid_redis_sessions-1.0.1.tar.gz";
721 md5 = "a39bbfd36f61685eac32d5f4010d3fef";
722 };
723 meta = {
724 license = [ { fullName = "FreeBSD"; } ];
725 };
726 };
727 pyramid-tm = super.buildPythonPackage {
728 name = "pyramid-tm-0.12";
729 buildInputs = with self; [];
730 doCheck = false;
731 propagatedBuildInputs = with self; [pyramid transaction];
732 src = fetchurl {
733 url = "https://pypi.python.org/packages/3e/0b/a0fd3856c8ca2b30f20fcd26627b9cf9d91cd2cfabae42aee3441b2441c5/pyramid_tm-0.12.tar.gz";
734 md5 = "6e5f4449706855fdb7c63d2190e0209b";
735 };
736 meta = {
737 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
738 };
739 };
740 python-dateutil = super.buildPythonPackage {
741 name = "python-dateutil-2.5.3";
742 buildInputs = with self; [];
743 doCheck = false;
744 propagatedBuildInputs = with self; [six];
745 src = fetchurl {
746 url = "https://pypi.python.org/packages/3e/f5/aad82824b369332a676a90a8c0d1e608b17e740bbb6aeeebca726f17b902/python-dateutil-2.5.3.tar.gz";
747 md5 = "05ffc6d2cc85a7fd93bb245807f715ef";
748 };
749 meta = {
750 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "Simplified BSD"; } ];
751 };
752 };
753 python-editor = super.buildPythonPackage {
754 name = "python-editor-1.0.1";
755 buildInputs = with self; [];
756 doCheck = false;
757 propagatedBuildInputs = with self; [];
758 src = fetchurl {
759 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
760 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
761 };
762 meta = {
763 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
764 };
765 };
766 pytz = super.buildPythonPackage {
767 name = "pytz-2016.6.1";
768 buildInputs = with self; [];
769 doCheck = false;
770 propagatedBuildInputs = with self; [];
771 src = fetchurl {
772 url = "https://pypi.python.org/packages/5d/8e/6635d8f3f9f48c03bb925fab543383089858271f9cfd1216b83247e8df94/pytz-2016.6.1.tar.gz";
773 md5 = "b6c28a3b968bc1d8badfb61b93874e03";
774 };
775 meta = {
776 license = [ pkgs.lib.licenses.mit ];
777 };
778 };
779 redis = super.buildPythonPackage {
780 name = "redis-2.10.5";
781 buildInputs = with self; [];
782 doCheck = false;
783 propagatedBuildInputs = with self; [];
784 src = fetchurl {
785 url = "https://pypi.python.org/packages/68/44/5efe9e98ad83ef5b742ce62a15bea609ed5a0d1caf35b79257ddb324031a/redis-2.10.5.tar.gz";
786 md5 = "3b26c2b9703b4b56b30a1ad508e31083";
787 };
788 meta = {
789 license = [ pkgs.lib.licenses.mit ];
790 };
791 };
792 redlock-py = super.buildPythonPackage {
793 name = "redlock-py-1.0.8";
794 buildInputs = with self; [];
795 doCheck = false;
796 propagatedBuildInputs = with self; [redis];
797 src = fetchurl {
798 url = "https://pypi.python.org/packages/7c/40/29e1730f771b5d27e3c77b5426b6a67a3642868bf8bd592dfa6639feda98/redlock-py-1.0.8.tar.gz";
799 md5 = "7f8fe8ddefbe35deaa64d67ebdf1c58e";
800 };
801 meta = {
802 license = [ pkgs.lib.licenses.mit ];
803 };
804 };
805 repoze.lru = super.buildPythonPackage {
806 name = "repoze.lru-0.6";
807 buildInputs = with self; [];
808 doCheck = false;
809 propagatedBuildInputs = with self; [];
810 src = fetchurl {
811 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
812 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
813 };
814 meta = {
815 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
816 };
817 };
818 repoze.sendmail = super.buildPythonPackage {
819 name = "repoze.sendmail-4.1";
820 buildInputs = with self; [];
821 doCheck = false;
822 propagatedBuildInputs = with self; [setuptools zope.interface transaction];
823 src = fetchurl {
824 url = "https://pypi.python.org/packages/6b/3a/501a897c036c7b728b02a2695998055755e9e71c7e135abdcf200958965e/repoze.sendmail-4.1.tar.gz";
825 md5 = "81d15f1f03cc67d6f56f2091c594ef57";
826 };
827 meta = {
828 license = [ pkgs.lib.licenses.zpt21 ];
829 };
830 };
831 requests = super.buildPythonPackage {
832 name = "requests-2.9.1";
833 buildInputs = with self; [];
834 doCheck = false;
835 propagatedBuildInputs = with self; [];
836 src = fetchurl {
837 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
838 md5 = "0b7f480d19012ec52bab78292efd976d";
839 };
840 meta = {
841 license = [ pkgs.lib.licenses.asl20 ];
842 };
843 };
844 requests-oauthlib = super.buildPythonPackage {
845 name = "requests-oauthlib-0.6.1";
846 buildInputs = with self; [];
847 doCheck = false;
848 propagatedBuildInputs = with self; [oauthlib requests];
849 src = fetchurl {
850 url = "https://pypi.python.org/packages/f9/98/a1aaae4bbcde0e98d6d853c4f08bd52f20b0005cefb881679bcdf7ea7a00/requests-oauthlib-0.6.1.tar.gz";
851 md5 = "f159bc7675ebe6a2d76798f4c00c5bf8";
852 };
853 meta = {
854 license = [ pkgs.lib.licenses.isc pkgs.lib.licenses.bsdOriginal ];
855 };
856 };
857 requests-toolbelt = super.buildPythonPackage {
858 name = "requests-toolbelt-0.7.0";
859 buildInputs = with self; [];
860 doCheck = false;
861 propagatedBuildInputs = with self; [requests];
862 src = fetchurl {
863 url = "https://pypi.python.org/packages/59/78/1d391d30ebf74079a8e4de6ab66fdca5362903ef2df64496f4697e9bb626/requests-toolbelt-0.7.0.tar.gz";
864 md5 = "bfe2009905f460f4764c32cfbbf4205f";
865 };
866 meta = {
867 license = [ pkgs.lib.licenses.asl20 ];
868 };
869 };
870 setuptools = super.buildPythonPackage {
871 name = "setuptools-27.2.0";
872 buildInputs = with self; [];
873 doCheck = false;
874 propagatedBuildInputs = with self; [];
875 src = fetchurl {
876 url = "https://pypi.python.org/packages/87/ba/54197971d107bc06f5f3fbdc0d728a7ae0b10cafca46acfddba65a0899d8/setuptools-27.2.0.tar.gz";
877 md5 = "b39715612fdc0372dbfd7b3fcf5d4fe5";
878 };
879 meta = {
880 license = [ pkgs.lib.licenses.mit ];
881 };
882 };
883 simplejson = super.buildPythonPackage {
884 name = "simplejson-3.8.2";
885 buildInputs = with self; [];
886 doCheck = false;
887 propagatedBuildInputs = with self; [];
888 src = fetchurl {
889 url = "https://pypi.python.org/packages/f0/07/26b519e6ebb03c2a74989f7571e6ae6b82e9d7d81b8de6fcdbfc643c7b58/simplejson-3.8.2.tar.gz";
890 md5 = "53b1371bbf883b129a12d594a97e9a18";
891 };
892 meta = {
893 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
894 };
895 };
896 six = super.buildPythonPackage {
897 name = "six-1.9.0";
898 buildInputs = with self; [];
899 doCheck = false;
900 propagatedBuildInputs = with self; [];
901 src = fetchurl {
902 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
903 md5 = "476881ef4012262dfc8adc645ee786c4";
904 };
905 meta = {
906 license = [ pkgs.lib.licenses.mit ];
907 };
908 };
909 tlslite = super.buildPythonPackage {
910 name = "tlslite-0.4.9";
911 buildInputs = with self; [];
912 doCheck = false;
913 propagatedBuildInputs = with self; [];
914 src = fetchurl {
915 url = "https://pypi.python.org/packages/92/2b/7904cf913d9bf150b3e408a92c9cb5ce0b97a9ec19f998af48bf4c607f0e/tlslite-0.4.9.tar.gz";
916 md5 = "9f3b3797f595dd66cd36a65c83a87869";
917 };
918 meta = {
919 license = [ { fullName = "public domain and BSD"; } ];
920 };
921 };
922 transaction = super.buildPythonPackage {
923 name = "transaction-1.4.3";
924 buildInputs = with self; [];
925 doCheck = false;
926 propagatedBuildInputs = with self; [zope.interface];
927 src = fetchurl {
928 url = "https://pypi.python.org/packages/9d/9d/afb5c4904fb41edc14029744ff030ac0596846262bda6145edf23791c880/transaction-1.4.3.tar.gz";
929 md5 = "b4ca5983c9e3a0808ff5ff7648092c76";
930 };
931 meta = {
932 license = [ pkgs.lib.licenses.zpt21 ];
933 };
934 };
935 translationstring = super.buildPythonPackage {
936 name = "translationstring-1.3";
937 buildInputs = with self; [];
938 doCheck = false;
939 propagatedBuildInputs = with self; [];
940 src = fetchurl {
941 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
942 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
943 };
944 meta = {
945 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
946 };
947 };
948 urllib3 = super.buildPythonPackage {
949 name = "urllib3-1.17";
950 buildInputs = with self; [];
951 doCheck = false;
952 propagatedBuildInputs = with self; [];
953 src = fetchurl {
954 url = "https://pypi.python.org/packages/c2/79/8851583070bac203561d21b9478340535893f587759608156aaca60a615a/urllib3-1.17.tar.gz";
955 md5 = "12d5520f0fffed0e65cb66b5bdc6ddec";
956 };
957 meta = {
958 license = [ pkgs.lib.licenses.mit ];
959 };
960 };
961 venusian = super.buildPythonPackage {
962 name = "venusian-1.0";
963 buildInputs = with self; [];
964 doCheck = false;
965 propagatedBuildInputs = with self; [];
966 src = fetchurl {
967 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
968 md5 = "dccf2eafb7113759d60c86faf5538756";
969 };
970 meta = {
971 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
972 };
973 };
974 waitress = super.buildPythonPackage {
975 name = "waitress-1.0.0";
976 buildInputs = with self; [];
977 doCheck = false;
978 propagatedBuildInputs = with self; [];
979 src = fetchurl {
980 url = "https://pypi.python.org/packages/a5/c3/264a56b2470de29f35dda8369886663303c8a2294673b2e6b9975e59f471/waitress-1.0.0.tar.gz";
981 md5 = "b900c4d793e218d77742f47ece58dd43";
982 };
983 meta = {
984 license = [ pkgs.lib.licenses.zpt21 ];
985 };
986 };
987 webassets = super.buildPythonPackage {
988 name = "webassets-0.11.1";
989 buildInputs = with self; [];
990 doCheck = false;
991 propagatedBuildInputs = with self; [];
992 src = fetchurl {
993 url = "https://pypi.python.org/packages/0e/97/f0cd013a3ae074672e9fdfa8629e4071b5cc420a2c82bef5622a87631d1c/webassets-0.11.1.tar.gz";
994 md5 = "6acca51bd12fbdc0399ab1a9b67a1599";
995 };
996 meta = {
997 license = [ pkgs.lib.licenses.bsdOriginal ];
998 };
999 };
1000 webhelpers2 = super.buildPythonPackage {
1001 name = "webhelpers2-2.0";
1002 buildInputs = with self; [];
1003 doCheck = false;
1004 propagatedBuildInputs = with self; [MarkupSafe six];
1005 src = fetchurl {
1006 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
1007 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
1008 };
1009 meta = {
1010 license = [ pkgs.lib.licenses.mit ];
1011 };
1012 };
1013 wtforms = super.buildPythonPackage {
1014 name = "wtforms-2.1";
1015 buildInputs = with self; [];
1016 doCheck = false;
1017 propagatedBuildInputs = with self; [];
1018 src = fetchurl {
1019 url = "https://pypi.python.org/packages/bf/91/2e553b86c55e9cf2f33265de50e052441fb753af46f5f20477fe9c61280e/WTForms-2.1.zip";
1020 md5 = "6938a541fafd1a1ae2f6b9b88588eef2";
1021 };
1022 meta = {
1023 license = [ pkgs.lib.licenses.bsdOriginal ];
1024 };
1025 };
1026 ziggurat-foundations = super.buildPythonPackage {
1027 name = "ziggurat-foundations-0.6.8";
1028 buildInputs = with self; [];
1029 doCheck = false;
1030 propagatedBuildInputs = with self; [SQLAlchemy passlib paginate paginate-sqlalchemy alembic six];
1031 src = fetchurl {
1032 url = "https://pypi.python.org/packages/b2/3c/f9a0112a30424a58fccdd357338b4559fdda9e1bb3c9611b1ad263abf49e/ziggurat_foundations-0.6.8.tar.gz";
1033 md5 = "d2cc7201667b0e01099456a77726179c";
1034 };
1035 meta = {
1036 license = [ pkgs.lib.licenses.bsdOriginal ];
1037 };
1038 };
1039 zope.deprecation = super.buildPythonPackage {
1040 name = "zope.deprecation-4.1.2";
1041 buildInputs = with self; [];
1042 doCheck = false;
1043 propagatedBuildInputs = with self; [setuptools];
1044 src = fetchurl {
1045 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1046 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1047 };
1048 meta = {
1049 license = [ pkgs.lib.licenses.zpt21 ];
1050 };
1051 };
1052 zope.interface = super.buildPythonPackage {
1053 name = "zope.interface-4.3.2";
1054 buildInputs = with self; [];
1055 doCheck = false;
1056 propagatedBuildInputs = with self; [setuptools];
1057 src = fetchurl {
1058 url = "https://pypi.python.org/packages/38/1b/d55c39f2cf442bd9fb2c59760ed058c84b57d25c680819c25f3aff741e1f/zope.interface-4.3.2.tar.gz";
1059 md5 = "5f7e15a5bcdfa3c6c0e93ffe45caf87c";
1060 };
1061 meta = {
1062 license = [ pkgs.lib.licenses.zpt21 ];
1063 };
1064 };
1065 zope.sqlalchemy = super.buildPythonPackage {
1066 name = "zope.sqlalchemy-0.7.6";
1067 buildInputs = with self; [];
1068 doCheck = false;
1069 propagatedBuildInputs = with self; [setuptools SQLAlchemy transaction zope.interface];
1070 src = fetchurl {
1071 url = "https://pypi.python.org/packages/d0/e0/5df0d7f9f1955e2e2edecbb1367cf1fa76bc2f84d700661ffd4161c7e2e9/zope.sqlalchemy-0.7.6.zip";
1072 md5 = "0f5bf14679951e59007e090b6922688c";
1073 };
1074 meta = {
1075 license = [ pkgs.lib.licenses.zpt21 ];
1076 };
1077 };
1078
1079 ### Test requirements
1080
1081
1082 }
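Each entry in the set above pins its source by URL plus an md5 hash, which is what lets Nix rebuild the same artifact later. A quick sketch for checking one pin outside Nix (requires network; note that pypi.python.org URLs from this era now redirect to files.pythonhosted.org, so behavior today may differ):

import hashlib
import urllib.request

# Check the "six" source pin from the package set above (sketch only).
url = ("https://pypi.python.org/packages/16/64/"
       "1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/"
       "six-1.9.0.tar.gz")
expected_md5 = "476881ef4012262dfc8adc645ee786c4"

data = urllib.request.urlopen(url).read()
actual = hashlib.md5(data).hexdigest()
print("OK" if actual == expected_md5 else "MISMATCH: " + actual)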
@@ -0,0 +1,1 @@
1 1.1.0
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
@@ -1,220 +1,220 @@
1 1 # Created by .ignore support plugin (hsz.mobi)
2 2 ### Node template
3 3 # Logs
4 4 logs
5 5 *.log
6 6 npm-debug.log*
7 7 yarn-debug.log*
8 8 yarn-error.log*
9 9
10 10 # Runtime data
11 11 pids
12 12 *.pid
13 13 *.seed
14 14 *.pid.lock
15 15
16 16 # Directory for instrumented libs generated by jscoverage/JSCover
17 17 lib-cov
18 18
19 19 # Coverage directory used by tools like istanbul
20 20 coverage
21 21
22 22 # nyc test coverage
23 23 .nyc_output
24 24
25 25 # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
26 26 .grunt
27 27
28 28 # Bower dependency directory (https://bower.io/)
29 29 bower_components
30 30
31 31 # node-waf configuration
32 32 .lock-wscript
33 33
34 34 # Compiled binary addons (https://nodejs.org/api/addons.html)
35 35 build/Release
36 36
37 37 # Dependency directories
38 38 node_modules/
39 39 jspm_packages/
40 40
41 41 # Typescript v1 declaration files
42 42 typings/
43 43
44 44 # Optional npm cache directory
45 45 .npm
46 46
47 47 # Optional eslint cache
48 48 .eslintcache
49 49
50 50 # Optional REPL history
51 51 .node_repl_history
52 52
53 53 # Output of 'npm pack'
54 54 *.tgz
55 55
56 56 # Yarn Integrity file
57 57 .yarn-integrity
58 58
59 59 # dotenv environment variables file
60 60 .env
61 61
62 62 ### JetBrains template
63 63 # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
64 64 # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
65 65
66 66 # User-specific stuff:
67 67 .idea/**/workspace.xml
68 68 .idea/**/tasks.xml
69 69 .idea/dictionaries
70 70
71 71 # Sensitive or high-churn files:
72 72 .idea/**/dataSources/
73 73 .idea/**/dataSources.ids
74 74 .idea/**/dataSources.xml
75 75 .idea/**/dataSources.local.xml
76 76 .idea/**/sqlDataSources.xml
77 77 .idea/**/dynamic.xml
78 78 .idea/**/uiDesigner.xml
79 79
80 80 # Gradle:
81 81 .idea/**/gradle.xml
82 82 .idea/**/libraries
83 83
84 84 # CMake
85 85 cmake-build-debug/
86 86
87 87 # Mongo Explorer plugin:
88 88 .idea/**/mongoSettings.xml
89 89
90 90 ## File-based project format:
91 91 *.iws
92 92
93 93 ## Plugin-specific files:
94 94
95 95 # IntelliJ
96 96 out/
97 97
98 98 # mpeltonen/sbt-idea plugin
99 99 .idea_modules/
100 100
101 101 # JIRA plugin
102 102 atlassian-ide-plugin.xml
103 103
104 104 # Cursive Clojure plugin
105 105 .idea/replstate.xml
106 106
107 107 # Crashlytics plugin (for Android Studio and IntelliJ)
108 108 com_crashlytics_export_strings.xml
109 109 crashlytics.properties
110 110 crashlytics-build.properties
111 111 fabric.properties
112 112 ### Python template
113 113 # Byte-compiled / optimized / DLL files
114 114 __pycache__/
115 115 *.py[cod]
116 116 *$py.class
117 117
118 118 # C extensions
119 119 *.so
120 120
121 121 # Distribution / packaging
122 122 .Python
123 123 build/
124 124 develop-eggs/
125 125 dist/
126 126 downloads/
127 127 eggs/
128 128 .eggs/
129 /lib/
130 /lib64/
129 lib/
130 lib64/
131 131 parts/
132 132 sdist/
133 133 var/
134 134 wheels/
135 135 *.egg-info/
136 136 .installed.cfg
137 137 *.egg
138 138 MANIFEST
139 139
140 140 # PyInstaller
141 141 # Usually these files are written by a python script from a template
142 142 # before PyInstaller builds the exe, so as to inject date/other infos into it.
143 143 *.manifest
144 144 *.spec
145 145
146 146 # Installer logs
147 147 pip-log.txt
148 148 pip-delete-this-directory.txt
149 149
150 150 # Unit test / coverage reports
151 151 htmlcov/
152 152 .tox/
153 153 .coverage
154 154 .coverage.*
155 155 .cache
156 156 nosetests.xml
157 157 coverage.xml
158 158 *.cover
159 159 .hypothesis/
160 160
161 161 # Translations
162 162 *.mo
163 163 *.pot
164 164
165 165 # Django stuff:
166 166 local_settings.py
167 167
168 168 # Flask stuff:
169 169 instance/
170 170 .webassets-cache
171 171
172 172 # Scrapy stuff:
173 173 .scrapy
174 174
175 175 # Sphinx documentation
176 176 docs/_build/
177 177
178 178 # PyBuilder
179 179 target/
180 180
181 181 # Jupyter Notebook
182 182 .ipynb_checkpoints
183 183
184 184 # pyenv
185 185 .python-version
186 186
187 187 # celery beat schedule file
188 188 celerybeat-schedule
189 189
190 190 # SageMath parsed files
191 191 *.sage.py
192 192
193 193 # Environments
194 194 .venv
195 195 env/
196 196 venv/
197 197 ENV/
198 198 env.bak/
199 199 venv.bak/
200 200
201 201 # Spyder project settings
202 202 .spyderproject
203 203 .spyproject
204 204
205 205 # Rope project settings
206 206 .ropeproject
207 207
208 208 # mkdocs documentation
209 209 /site
210 210
211 211 # mypy
212 212 .mypy_cache/
213 213 ### Example user template template
214 214 ### Example user template
215 215
216 216 # IntelliJ project files
217 217 .idea
218 218 *.iml
219 219 out
220 220 gen
@@ -1,214 +1,219 @@
1 This program is free software: you can redistribute it and/or modify
2 it under the terms of the GNU Affero General Public License, version 3
3 (only), as published by the Free Software Foundation.
4
5
1 6 This program incorporates work covered by the following copyright and
2 7 permission notice:
3 8
4 9 Copyright (c) 2014-2016 - packaging
5 10 file:
6 11 Copyright (c) 2008-2011 - msgpack-python
7 12 file:licenses/msgpack_license.txt
8 13 Copyright (c) 2007-2008 - amqp
9 14 file:licenses/amqp_license.txt
10 15 Copyright (c) 2013 - bcrypt
11 16 file:licenses/bcrypt_license.txt
12 17 Copyright (c) 2015 - elasticsearch
13 18 file:licenses/elasticsearch_license.txt
14 19 Copyright (c) 2011-2013 - gevent-websocket
15 20 file:licenses/gevent_websocket_license.txt
16 21 Copyright (c) 2015 - python-editor
17 22 file:licenses/python_editor_license.txt
18 23 Copyright (c) 2015 - requests
19 24 file:licenses/requests_license.txt
20 25 Copyright (c) 2014 - requests-toolbelt
21 26 file:licenses/requests_toolbelt_license.txt
22 27
23 28 Both licensed under the Apache License, Version 2.0 (the "License");
24 29 you may not use this file except in compliance with the License.
25 30 You may obtain a copy of the License at
26 31
27 32 http://www.apache.org/licenses/LICENSE-2.0
28 33
29 34 Unless required by applicable law or agreed to in writing, software
30 35 distributed under the License is distributed on an "AS IS" BASIS,
31 36 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
32 37 See the License for the specific language governing permissions and
33 38 limitations under the License.
34 39
35 40
36 41 Below is the full text of Apache License, version 2.0
37 42
38 43
39 44 Apache License
40 45 Version 2.0, January 2004
41 46 http://www.apache.org/licenses/
42 47
43 48 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
44 49
45 50 1. Definitions.
46 51
47 52 "License" shall mean the terms and conditions for use, reproduction,
48 53 and distribution as defined by Sections 1 through 9 of this document.
49 54
50 55 "Licensor" shall mean the copyright owner or entity authorized by
51 56 the copyright owner that is granting the License.
52 57
53 58 "Legal Entity" shall mean the union of the acting entity and all
54 59 other entities that control, are controlled by, or are under common
55 60 control with that entity. For the purposes of this definition,
56 61 "control" means (i) the power, direct or indirect, to cause the
57 62 direction or management of such entity, whether by contract or
58 63 otherwise, or (ii) ownership of fifty percent (50%) or more of the
59 64 outstanding shares, or (iii) beneficial ownership of such entity.
60 65
61 66 "You" (or "Your") shall mean an individual or Legal Entity
62 67 exercising permissions granted by this License.
63 68
64 69 "Source" form shall mean the preferred form for making modifications,
65 70 including but not limited to software source code, documentation
66 71 source, and configuration files.
67 72
68 73 "Object" form shall mean any form resulting from mechanical
69 74 transformation or translation of a Source form, including but
70 75 not limited to compiled object code, generated documentation,
71 76 and conversions to other media types.
72 77
73 78 "Work" shall mean the work of authorship, whether in Source or
74 79 Object form, made available under the License, as indicated by a
75 80 copyright notice that is included in or attached to the work
76 81 (an example is provided in the Appendix below).
77 82
78 83 "Derivative Works" shall mean any work, whether in Source or Object
79 84 form, that is based on (or derived from) the Work and for which the
80 85 editorial revisions, annotations, elaborations, or other modifications
81 86 represent, as a whole, an original work of authorship. For the purposes
82 87 of this License, Derivative Works shall not include works that remain
83 88 separable from, or merely link (or bind by name) to the interfaces of,
84 89 the Work and Derivative Works thereof.
85 90
86 91 "Contribution" shall mean any work of authorship, including
87 92 the original version of the Work and any modifications or additions
88 93 to that Work or Derivative Works thereof, that is intentionally
89 94 submitted to Licensor for inclusion in the Work by the copyright owner
90 95 or by an individual or Legal Entity authorized to submit on behalf of
91 96 the copyright owner. For the purposes of this definition, "submitted"
92 97 means any form of electronic, verbal, or written communication sent
93 98 to the Licensor or its representatives, including but not limited to
94 99 communication on electronic mailing lists, source code control systems,
95 100 and issue tracking systems that are managed by, or on behalf of, the
96 101 Licensor for the purpose of discussing and improving the Work, but
97 102 excluding communication that is conspicuously marked or otherwise
98 103 designated in writing by the copyright owner as "Not a Contribution."
99 104
100 105 "Contributor" shall mean Licensor and any individual or Legal Entity
101 106 on behalf of whom a Contribution has been received by Licensor and
102 107 subsequently incorporated within the Work.
103 108
104 109 2. Grant of Copyright License. Subject to the terms and conditions of
105 110 this License, each Contributor hereby grants to You a perpetual,
106 111 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
107 112 copyright license to reproduce, prepare Derivative Works of,
108 113 publicly display, publicly perform, sublicense, and distribute the
109 114 Work and such Derivative Works in Source or Object form.
110 115
111 116 3. Grant of Patent License. Subject to the terms and conditions of
112 117 this License, each Contributor hereby grants to You a perpetual,
113 118 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
114 119 (except as stated in this section) patent license to make, have made,
115 120 use, offer to sell, sell, import, and otherwise transfer the Work,
116 121 where such license applies only to those patent claims licensable
117 122 by such Contributor that are necessarily infringed by their
118 123 Contribution(s) alone or by combination of their Contribution(s)
119 124 with the Work to which such Contribution(s) was submitted. If You
120 125 institute patent litigation against any entity (including a
121 126 cross-claim or counterclaim in a lawsuit) alleging that the Work
122 127 or a Contribution incorporated within the Work constitutes direct
123 128 or contributory patent infringement, then any patent licenses
124 129 granted to You under this License for that Work shall terminate
125 130 as of the date such litigation is filed.
126 131
127 132 4. Redistribution. You may reproduce and distribute copies of the
128 133 Work or Derivative Works thereof in any medium, with or without
129 134 modifications, and in Source or Object form, provided that You
130 135 meet the following conditions:
131 136
132 137 (a) You must give any other recipients of the Work or
133 138 Derivative Works a copy of this License; and
134 139
135 140 (b) You must cause any modified files to carry prominent notices
136 141 stating that You changed the files; and
137 142
138 143 (c) You must retain, in the Source form of any Derivative Works
139 144 that You distribute, all copyright, patent, trademark, and
140 145 attribution notices from the Source form of the Work,
141 146 excluding those notices that do not pertain to any part of
142 147 the Derivative Works; and
143 148
144 149 (d) If the Work includes a "NOTICE" text file as part of its
145 150 distribution, then any Derivative Works that You distribute must
146 151 include a readable copy of the attribution notices contained
147 152 within such NOTICE file, excluding those notices that do not
148 153 pertain to any part of the Derivative Works, in at least one
149 154 of the following places: within a NOTICE text file distributed
150 155 as part of the Derivative Works; within the Source form or
151 156 documentation, if provided along with the Derivative Works; or,
152 157 within a display generated by the Derivative Works, if and
153 158 wherever such third-party notices normally appear. The contents
154 159 of the NOTICE file are for informational purposes only and
155 160 do not modify the License. You may add Your own attribution
156 161 notices within Derivative Works that You distribute, alongside
157 162 or as an addendum to the NOTICE text from the Work, provided
158 163 that such additional attribution notices cannot be construed
159 164 as modifying the License.
160 165
161 166 You may add Your own copyright statement to Your modifications and
162 167 may provide additional or different license terms and conditions
163 168 for use, reproduction, or distribution of Your modifications, or
164 169 for any such Derivative Works as a whole, provided Your use,
165 170 reproduction, and distribution of the Work otherwise complies with
166 171 the conditions stated in this License.
167 172
168 173 5. Submission of Contributions. Unless You explicitly state otherwise,
169 174 any Contribution intentionally submitted for inclusion in the Work
170 175 by You to the Licensor shall be under the terms and conditions of
171 176 this License, without any additional terms or conditions.
172 177 Notwithstanding the above, nothing herein shall supersede or modify
173 178 the terms of any separate license agreement you may have executed
174 179 with Licensor regarding such Contributions.
175 180
176 181 6. Trademarks. This License does not grant permission to use the trade
177 182 names, trademarks, service marks, or product names of the Licensor,
178 183 except as required for reasonable and customary use in describing the
179 184 origin of the Work and reproducing the content of the NOTICE file.
180 185
181 186 7. Disclaimer of Warranty. Unless required by applicable law or
182 187 agreed to in writing, Licensor provides the Work (and each
183 188 Contributor provides its Contributions) on an "AS IS" BASIS,
184 189 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
185 190 implied, including, without limitation, any warranties or conditions
186 191 of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
187 192 PARTICULAR PURPOSE. You are solely responsible for determining the
188 193 appropriateness of using or redistributing the Work and assume any
189 194 risks associated with Your exercise of permissions under this License.
190 195
191 196 8. Limitation of Liability. In no event and under no legal theory,
192 197 whether in tort (including negligence), contract, or otherwise,
193 198 unless required by applicable law (such as deliberate and grossly
194 199 negligent acts) or agreed to in writing, shall any Contributor be
195 200 liable to You for damages, including any direct, indirect, special,
196 201 incidental, or consequential damages of any character arising as a
197 202 result of this License or out of the use or inability to use the
198 203 Work (including but not limited to damages for loss of goodwill,
199 204 work stoppage, computer failure or malfunction, or any and all
200 205 other commercial damages or losses), even if such Contributor
201 206 has been advised of the possibility of such damages.
202 207
203 208 9. Accepting Warranty or Additional Liability. While redistributing
204 209 the Work or Derivative Works thereof, You may choose to offer,
205 210 and charge a fee for, acceptance of support, warranty, indemnity,
206 211 or other liability obligations and/or rights consistent with this
207 212 License. However, in accepting such obligations, You may act only
208 213 on Your own behalf and on Your sole responsibility, not on behalf
209 214 of any other Contributor, and only if You agree to indemnify,
210 215 defend, and hold each Contributor harmless for any liability
211 216 incurred by, or claims asserted against, such Contributor by reason
212 217 of your accepting any such warranty or additional liability.
213 218
214 219 END OF TERMS AND CONDITIONS
@@ -1,9 +1,99 b''
1 # AppEnlight
1 AppEnlight
2 -----------
2 3
3 Performance, exception, and uptime monitoring for the Web
4 Automatic Installation
5 ======================
4 6
5 ![AppEnlight image](https://raw.githubusercontent.com/AppEnlight/appenlight/gh-pages/static/appenlight.png)
7 Use the ansible scripts in the `automation` repository to build a complete instance of the application.
8 You can also use `packer` files in `automation/packer` to create whole VMs for KVM and VMware.
6 9
7 Visit:
10 Manual Installation
11 ===================
8 12
9 [Readme moved to backend directory](backend/README.md)
13 To run the app you need to meet the following prerequisites:
14
15 - python 3.5+
16 - running elasticsearch (2.3+/2.4 tested)
17 - running postgresql (9.5+ required)
18 - running redis
19
20 Install the app by running:
21
22 pip install -r requirements.txt
23
24 python setup.py develop
25
26 Install the appenlight uptime plugin (the `ae_uptime_ce` package from the `appenlight-uptime-ce` repository).
27
28 After installing the application you need to perform the following steps:
29
30 1. (optional) generate production.ini (or use a copy of development.ini)
31
32
33 appenlight-make-config production.ini
34
35 2. Set up the database structure:
36
37
38 appenlight-migratedb -c FILENAME.ini
39
40 3. Configure elasticsearch:
41
42
43 appenlight-reindex-elasticsearch -t all -c FILENAME.ini
44
45 4. Create base database objects
46
47 (run this command with the help flag to see how to create an administrator user)
48
49
50 appenlight-initializedb -c FILENAME.ini
51
52 5. Generate static assets
53
54
55 appenlight-static -c FILENAME.ini
56
57 Running the application
58 ===================
59
60 To run the main app:
61
62 pserve development.ini
63
64 To run celery workers:
65
66 celery worker -A appenlight.celery -Q "reports,logs,metrics,default" --ini FILENAME.ini
67
68 To run celery beat:
69
70 celery beat -A appenlight.celery --ini FILENAME.ini
71
72 To run appenlight's uptime plugin:
73
74 appenlight-uptime-monitor -c FILENAME.ini
75
76 Real-time Notifications
77 =======================
78
79 You should also run the `channelstream` websocket server for real-time notifications:
80
81 channelstream -i filename.ini
82
83 Testing
84 =======
85
86 To run the test suite:
87
88 py.test appenlight/tests/tests.py --cov appenlight (this looks for testing.ini in the repo root)
89
90
91 Development
92 ===========
93
94 To develop the appenlight frontend:
95
96 cd frontend
97 npm install
98 bower install
99 grunt watch
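The `celery worker` invocation above subscribes to the four queues (`reports`, `logs`, `metrics`, `default`) that the backend tasks are pinned to via the `queue=` argument of the task decorator (see `appenlight/celery/tasks.py` further down in this diff). A minimal sketch of a task bound to one of those queues, assuming the appenlight package is importable; the task itself is hypothetical:

    from appenlight.celery import celery

    # Hypothetical example task; the real tasks use e.g. queue="reports"
    # with default_retry_delay=600 and max_retries=144.
    @celery.task(queue="default", default_retry_delay=1, max_retries=2)
    def ping():
        return "pong"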
@@ -1,2 +1,2 b''
1 include *.txt *.ini *.cfg *.rst *.md VERSION
2 recursive-include src *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml *.jinja2 *.rst *.otf *.ttf *.svg *.woff *.woff2 *.eot
1 include *.txt *.ini *.cfg *.rst *.md
2 recursive-include appenlight *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml *.jinja2 *.rst *.otf *.ttf *.svg *.woff *.eot
@@ -1,47 +1,49 b''
1 repoze.sendmail==4.4.1
2 pyramid==1.10.2
3 pyramid_tm==2.2.1
1 repoze.sendmail==4.1
2 pyramid==1.7.3
3 pyramid_tm==0.12
4 4 pyramid_debugtoolbar
5 5 pyramid_authstack==1.0.1
6 SQLAlchemy==1.2.18
7 alembic==1.0.8
6 SQLAlchemy==1.0.12
7 alembic==0.8.6
8 8 webhelpers2==2.0
9 transaction==2.4.0
10 zope.sqlalchemy==1.1
11 pyramid_mailer==0.15.1
12 redis==3.2.1
9 transaction==1.4.3
10 zope.sqlalchemy==0.7.6
11 pyramid_mailer==0.14.1
12 redis==2.10.5
13 13 redlock-py==1.0.8
14 pyramid_jinja2==2.8
15 psycopg2-binary==2.7.7
16 wtforms==2.2.1
17 celery==4.2.1
18 formencode==1.3.1
19 psutil==5.6.1
20 ziggurat_foundations==0.8.3
21 bcrypt==3.1.6
14 pyramid_jinja2==2.6.2
15 psycopg2==2.6.1
16 wtforms==2.1
17 celery==3.1.23
18 formencode==1.3.0
19 psutil==2.1.2
20 ziggurat_foundations>=0.6.7
21 bcrypt==2.0.0
22 22 appenlight_client
23 markdown==3.0.1
24 colander==1.7
25 defusedxml==0.5.0
26 dogpile.cache==0.7.1
23 markdown==2.5
24 colander==1.2
25 defusedxml==0.4.1
26 dogpile.cache==0.5.7
27 27 pyramid_redis_sessions==1.0.1
28 simplejson==3.16.0
29 waitress==1.2.1
30 gunicorn==19.9.0
31 uwsgi==2.0.18
32 requests==2.21.0
33 requests_oauthlib==1.2.0
34 gevent==1.4.0
35 pygments==2.3.1
36 lxml==4.3.2
37 paginate==0.5.6
38 paginate-sqlalchemy==0.3.0
39 elasticsearch>=6.0.0,<7.0.0
28 simplejson==3.8.2
29 waitress==1.0
30 gunicorn==19.4.5
31 requests==2.9.1
32 requests_oauthlib==0.6.1
33 gevent==1.1.1
34 gevent-websocket==0.9.5
35 pygments==2.1.3
36 lxml==3.6.0
37 paginate==0.5.4
38 paginate-sqlalchemy==0.2.0
39 pyelasticsearch==1.4
40 six==1.9.0
40 41 mock==1.0.1
41 itsdangerous==1.1.0
42 itsdangerous==0.24
42 43 camplight==0.9.6
43 44 jira==1.0.7
44 45 python-dateutil==2.5.3
45 46 authomatic==0.1.0.post1
46 cryptography==2.6.1
47 cryptography==1.2.3
48 webassets==0.11.1
47 49
@@ -1,99 +1,77 b''
1 1 import os
2 import sys
2 3 import re
3 4
4 5 from setuptools import setup, find_packages
5 6
6 7 here = os.path.abspath(os.path.dirname(__file__))
7 README = open(os.path.join(here, "README.md")).read()
8 CHANGES = open(os.path.join(here, "CHANGELOG.md")).read()
8 README = open(os.path.join(here, '..', 'README.md')).read()
9 CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read()
9 10
10 REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()
11 REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines()
11 12
12 compiled = re.compile("([^=><]*).*")
13 compiled = re.compile('([^=><]*).*')
13 14
14 15
15 16 def parse_req(req):
16 17 return compiled.search(req).group(1).strip()
17 18
18 19
19 if "APPENLIGHT_DEVELOP" in os.environ:
20 requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]
21 else:
22 requires = REQUIREMENTS
20 requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]
23 21
24 22
25 23 def _get_meta_var(name, data, callback_handler=None):
26 24 import re
27
28 matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
25 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
29 26 if matches:
30 27 if not callable(callback_handler):
31 28 callback_handler = lambda v: v
32 29
33 30 return callback_handler(eval(matches.groups()[0]))
34 31
35
36 with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
32 with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta:
37 33 _metadata = _meta.read()
38 34
39 __license__ = _get_meta_var("__license__", _metadata)
40 __author__ = _get_meta_var("__author__", _metadata)
41 __url__ = _get_meta_var("__url__", _metadata)
42
43 found_packages = find_packages("src")
44 found_packages.append("appenlight.migrations")
45 found_packages.append("appenlight.migrations.versions")
46 setup(
47 name="appenlight",
48 description="appenlight",
49 long_description=README,
50 classifiers=[
51 "Framework :: Pyramid",
52 "License :: OSI Approved :: Apache Software License",
53 "Programming Language :: Python",
54 "Programming Language :: Python :: 3 :: Only",
55 "Programming Language :: Python :: 3.6",
56 "Topic :: System :: Monitoring",
57 "Topic :: Software Development",
58 "Topic :: Software Development :: Bug Tracking",
59 "Topic :: Internet :: Log Analysis",
60 "Topic :: Internet :: WWW/HTTP",
61 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
62 ],
63 version="2.0.0rc1",
64 license=__license__,
65 author=__author__,
66 url="https://github.com/AppEnlight/appenlight",
67 keywords="web wsgi bfg pylons pyramid flask django monitoring apm instrumentation appenlight",
68 python_requires=">=3.5",
69 long_description_content_type="text/markdown",
70 package_dir={"": "src"},
71 packages=found_packages,
72 include_package_data=True,
73 zip_safe=False,
74 test_suite="appenlight",
75 install_requires=requires,
76 extras_require={
77 "dev": [
78 "coverage",
79 "pytest",
80 "pyramid",
81 "tox",
82 "mock",
83 "pytest-mock",
84 "webtest",
85 ],
86 "lint": ["black"],
87 },
88 entry_points={
89 "paste.app_factory": ["main = appenlight:main"],
90 "console_scripts": [
91 "appenlight-cleanup = appenlight.scripts.cleanup:main",
92 "appenlight-initializedb = appenlight.scripts.initialize_db:main",
93 "appenlight-migratedb = appenlight.scripts.migratedb:main",
94 "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
95 "appenlight-static = appenlight.scripts.static:main",
96 "appenlight-make-config = appenlight.scripts.make_config:main",
97 ],
98 },
99 )
35 with open(os.path.join('src', 'appenlight', 'VERSION')) as _meta_version:
36 __version__ = _meta_version.read().strip()
37
38 __license__ = _get_meta_var('__license__', _metadata)
39 __author__ = _get_meta_var('__author__', _metadata)
40 __url__ = _get_meta_var('__url__', _metadata)
41
42 found_packages = find_packages('src')
43 found_packages.append('appenlight.migrations.versions')
44 setup(name='appenlight',
45 description='appenlight',
46 long_description=README + '\n\n' + CHANGES,
47 classifiers=[
48 "Programming Language :: Python",
49 "Framework :: Pylons",
50 "Topic :: Internet :: WWW/HTTP",
51 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
52 ],
53 version=__version__,
54 license=__license__,
55 author=__author__,
56 url=__url__,
57 keywords='web wsgi bfg pylons pyramid',
58 package_dir={'': 'src'},
59 packages=found_packages,
60 include_package_data=True,
61 zip_safe=False,
62 test_suite='appenlight',
63 install_requires=requires,
64 entry_points={
65 'paste.app_factory': [
66 'main = appenlight:main'
67 ],
68 'console_scripts': [
69 'appenlight-cleanup = appenlight.scripts.cleanup:main',
70 'appenlight-initializedb = appenlight.scripts.initialize_db:main',
71 'appenlight-migratedb = appenlight.scripts.migratedb:main',
72 'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main',
73 'appenlight-static = appenlight.scripts.static:main',
74 'appenlight-make-config = appenlight.scripts.make_config:main',
75 ]
76 }
77 )
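For reference, the `parse_req` helper above feeds `install_requires` by stripping version specifiers from each line of `requirements.txt`. A minimal standalone sketch of the same regex, with example lines taken from the requirements file:

    import re

    # Same pattern as setup.py: capture everything before the first
    # comparison character (=, > or <).
    compiled = re.compile('([^=><]*).*')

    def parse_req(req):
        return compiled.search(req).group(1).strip()

    assert parse_req('pyramid==1.7.3\n') == 'pyramid'
    assert parse_req('ziggurat_foundations>=0.6.7') == 'ziggurat_foundations'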
@@ -1,254 +1,222 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import datetime
18 18 import logging
19 from elasticsearch import Elasticsearch
19 import pyelasticsearch
20 20 import redis
21 21 import os
22 import pkg_resources
23 22 from pkg_resources import iter_entry_points
24 23
25 24 import appenlight.lib.jinja2_filters as jinja2_filters
26 25 import appenlight.lib.encryption as encryption
27 26
28 27 from pyramid.config import PHASE3_CONFIG
29 28 from pyramid.authentication import AuthTktAuthenticationPolicy
30 29 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid_mailer.interfaces import IMailer
30 from pyramid_mailer.mailer import Mailer
32 31 from pyramid.renderers import JSON
33 32 from pyramid_redis_sessions import session_factory_from_settings
34 33 from pyramid.settings import asbool, aslist
35 34 from pyramid.security import AllPermissionsList
36 35 from pyramid_authstack import AuthenticationStackPolicy
37 36 from redlock import Redlock
38 37 from sqlalchemy import engine_from_config
39 38
40 39 from appenlight.celery import configure_celery
41 from appenlight.lib.configurator import (
42 CythonCompatConfigurator,
43 register_appenlight_plugin,
44 )
40 from appenlight.lib.configurator import (CythonCompatConfigurator,
41 register_appenlight_plugin)
45 42 from appenlight.lib import cache_regions
46 43 from appenlight.lib.ext_json import json
47 44 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
48 45
49 __license__ = "Apache 2.0"
50 __author__ = "RhodeCode GmbH"
51 __url__ = "http://rhodecode.com"
52 __version__ = pkg_resources.get_distribution("appenlight").parsed_version
46 __license__ = 'Apache 2.0'
47 __author__ = 'RhodeCode GmbH'
48 __url__ = 'http://rhodecode.com'
53 49
54 50 json_renderer = JSON(serializer=json.dumps, indent=4)
55 51
56 52 log = logging.getLogger(__name__)
57 53
58 54
59 55 def datetime_adapter(obj, request):
60 56 return obj.isoformat()
61 57
62 58
63 59 def all_permissions_adapter(obj, request):
64 return "__all_permissions__"
60 return '__all_permissions__'
65 61
66 62
67 63 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
68 64 json_renderer.add_adapter(AllPermissionsList, all_permissions_adapter)
69 65
70 66
71 67 def main(global_config, **settings):
72 68 """ This function returns a Pyramid WSGI application.
73 69 """
74 70 auth_tkt_policy = AuthTktAuthenticationPolicy(
75 settings["authtkt.secret"],
76 hashalg="sha512",
71 settings['authtkt.secret'],
72 hashalg='sha512',
77 73 callback=groupfinder,
78 74 max_age=2592000,
79 secure=asbool(settings.get("authtkt.secure", "false")),
75 secure=asbool(settings.get('authtkt.secure', 'false')))
76 auth_token_policy = AuthTokenAuthenticationPolicy(
77 callback=groupfinder
80 78 )
81 auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
82 79 authorization_policy = ACLAuthorizationPolicy()
83 80 authentication_policy = AuthenticationStackPolicy()
84 authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
85 authentication_policy.add_policy("auth_token", auth_token_policy)
81 authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
82 authentication_policy.add_policy('auth_token', auth_token_policy)
86 83 # set crypto key
87 encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
84 encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
88 85 # import this later so encryption key can be monkeypatched
89 86 from appenlight.models import DBSession, register_datastores
90 87
91 88 # registration
92 settings["appenlight.disable_registration"] = asbool(
93 settings.get("appenlight.disable_registration")
94 )
89 settings['appenlight.disable_registration'] = asbool(
90 settings.get('appenlight.disable_registration'))
95 91
96 92 # update config with cometd info
97 settings["cometd_servers"] = {
98 "server": settings["cometd.server"],
99 "secret": settings["cometd.secret"],
100 }
93 settings['cometd_servers'] = {'server': settings['cometd.server'],
94 'secret': settings['cometd.secret']}
101 95
102 96 # Create the Pyramid Configurator.
103 settings["_mail_url"] = settings["mailing.app_url"]
97 settings['_mail_url'] = settings['mailing.app_url']
104 98 config = CythonCompatConfigurator(
105 99 settings=settings,
106 100 authentication_policy=authentication_policy,
107 101 authorization_policy=authorization_policy,
108 root_factory="appenlight.security.RootFactory",
109 default_permission="view",
110 )
102 root_factory='appenlight.security.RootFactory',
103 default_permission='view')
111 104 # custom registry variables
112 105
113 106 # resource type information
114 config.registry.resource_types = ["resource", "application"]
107 config.registry.resource_types = ['resource', 'application']
115 108 # plugin information
116 109 config.registry.appenlight_plugins = {}
117 110
118 config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
119 config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")
111 config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
112 config.add_view_deriver('appenlight.predicates.csrf_view',
113 name='csrf_view')
120 114
121 115 # later, when config is available
122 dogpile_config = {
123 "url": settings["redis.url"],
124 "redis_expiration_time": 86400,
125 "redis_distributed_lock": True,
126 }
116 dogpile_config = {'url': settings['redis.url'],
117 "redis_expiration_time": 86400,
118 "redis_distributed_lock": True}
127 119 cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
128 120 config.registry.cache_regions = cache_regions.regions
129 engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
121 engine = engine_from_config(settings, 'sqlalchemy.',
122 json_serializer=json.dumps)
130 123 DBSession.configure(bind=engine)
131 124
132 125 # json renderer that serializes datetime
133 config.add_renderer("json", json_renderer)
134 config.add_request_method(
135 "appenlight.lib.request.es_conn", "es_conn", property=True
136 )
137 config.add_request_method(
138 "appenlight.lib.request.get_user", "user", reify=True, property=True
139 )
140 config.add_request_method(
141 "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
142 )
143 config.add_request_method(
144 "appenlight.lib.request.safe_json_body",
145 "safe_json_body",
146 reify=True,
147 property=True,
148 )
149 config.add_request_method(
150 "appenlight.lib.request.unsafe_json_body",
151 "unsafe_json_body",
152 reify=True,
153 property=True,
154 )
155 config.add_request_method(
156 "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
157 )
158 config.add_request_method(
159 "appenlight.lib.request.get_authomatic", "authomatic", reify=True
160 )
161
162 config.include("pyramid_redis_sessions")
163 config.include("pyramid_tm")
164 config.include("pyramid_jinja2")
165 config.include("pyramid_mailer")
166 config.include("appenlight_client.ext.pyramid_tween")
167 config.include("ziggurat_foundations.ext.pyramid.sign_in")
168 es_server_list = aslist(settings["elasticsearch.nodes"])
169 redis_url = settings["redis.url"]
170 log.warning("Elasticsearch server list: {}".format(es_server_list))
171 log.warning("Redis server: {}".format(redis_url))
172 config.registry.es_conn = Elasticsearch(es_server_list)
126 config.add_renderer('json', json_renderer)
127 config.set_request_property('appenlight.lib.request.es_conn', 'es_conn')
128 config.set_request_property('appenlight.lib.request.get_user', 'user',
129 reify=True)
130 config.set_request_property('appenlight.lib.request.get_csrf_token',
131 'csrf_token', reify=True)
132 config.set_request_property('appenlight.lib.request.safe_json_body',
133 'safe_json_body', reify=True)
134 config.set_request_property('appenlight.lib.request.unsafe_json_body',
135 'unsafe_json_body', reify=True)
136 config.add_request_method('appenlight.lib.request.add_flash_to_headers',
137 'add_flash_to_headers')
138 config.add_request_method('appenlight.lib.request.get_authomatic',
139 'authomatic', reify=True)
140
141 config.include('pyramid_redis_sessions')
142 config.include('pyramid_tm')
143 config.include('pyramid_jinja2')
144 config.include('appenlight_client.ext.pyramid_tween')
145 config.include('ziggurat_foundations.ext.pyramid.sign_in')
146 es_server_list = aslist(settings['elasticsearch.nodes'])
147 redis_url = settings['redis.url']
148 log.warning('Elasticsearch server list: {}'.format(es_server_list))
149 log.warning('Redis server: {}'.format(redis_url))
150 config.registry.es_conn = pyelasticsearch.ElasticSearch(es_server_list)
173 151 config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)
174 152
175 config.registry.redis_lockmgr = Redlock(
176 [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
177 )
178 # mailer bw compat
179 config.registry.mailer = config.registry.getUtility(IMailer)
153 config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url'], ],
154 retry_count=0, retry_delay=0)
155 # mailer
156 config.registry.mailer = Mailer.from_settings(settings)
180 157
181 158 # Configure sessions
182 159 session_factory = session_factory_from_settings(settings)
183 160 config.set_session_factory(session_factory)
184 161
185 162 # Configure renderers and event subscribers
186 config.add_jinja2_extension("jinja2.ext.loopcontrols")
187 config.add_jinja2_search_path("appenlight:templates")
163 config.add_jinja2_extension('jinja2.ext.loopcontrols')
164 config.add_jinja2_search_path('appenlight:templates')
188 165 # event subscribers
189 config.add_subscriber(
190 "appenlight.subscribers.application_created",
191 "pyramid.events.ApplicationCreated",
192 )
193 config.add_subscriber(
194 "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
195 )
196 config.add_subscriber(
197 "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
198 )
199 config.add_view_predicate(
200 "context_type_class", "appenlight.predicates.contextTypeClass"
201 )
202
203 register_datastores(
204 es_conn=config.registry.es_conn,
205 redis_conn=config.registry.redis_conn,
206 redis_lockmgr=config.registry.redis_lockmgr,
207 )
166 config.add_subscriber("appenlight.subscribers.application_created",
167 "pyramid.events.ApplicationCreated")
168 config.add_subscriber("appenlight.subscribers.add_renderer_globals",
169 "pyramid.events.BeforeRender")
170 config.add_subscriber('appenlight.subscribers.new_request',
171 'pyramid.events.NewRequest')
172 config.add_view_predicate('context_type_class',
173 'appenlight.predicates.contextTypeClass')
174
175 register_datastores(es_conn=config.registry.es_conn,
176 redis_conn=config.registry.redis_conn,
177 redis_lockmgr=config.registry.redis_lockmgr)
208 178
209 179 # base stuff and scan
210 180
211 181 # need to ensure the webassets dir exists otherwise config.override_asset()
212 182 # throws exception
213 if not os.path.exists(settings["webassets.dir"]):
214 os.mkdir(settings["webassets.dir"])
215 config.add_static_view(
216 path="appenlight:webassets", name="static", cache_max_age=3600
217 )
218 config.override_asset(
219 to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
220 )
221
222 config.include("appenlight.views")
223 config.include("appenlight.views.admin")
224 config.scan(
225 ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
226 )
227
228 config.add_directive("register_appenlight_plugin", register_appenlight_plugin)
229
230 for entry_point in iter_entry_points(group="appenlight.plugins"):
183 if not os.path.exists(settings['webassets.dir']):
184 os.mkdir(settings['webassets.dir'])
185 config.add_static_view(path='appenlight:webassets',
186 name='static', cache_max_age=3600)
187 config.override_asset(to_override='appenlight:webassets/',
188 override_with=settings['webassets.dir'])
189
190 config.include('appenlight.views')
191 config.include('appenlight.views.admin')
192 config.scan(ignore=['appenlight.migrations', 'appenlight.scripts',
193 'appenlight.tests'])
194
195 config.add_directive('register_appenlight_plugin',
196 register_appenlight_plugin)
197
198 for entry_point in iter_entry_points(group='appenlight.plugins'):
231 199 plugin = entry_point.load()
232 200 plugin.includeme(config)
233 201
234 202 # include other appenlight plugins explicitly if needed
235 includes = aslist(settings.get("appenlight.includes", []))
203 includes = aslist(settings.get('appenlight.includes', []))
236 204 for inc in includes:
237 205 config.include(inc)
238 206
239 207 # run this after everything registers in configurator
240 208
241 209 def pre_commit():
242 210 jinja_env = config.get_jinja2_environment()
243 jinja_env.filters["tojson"] = json.dumps
244 jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe
211 jinja_env.filters['tojson'] = json.dumps
212 jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
245 213
246 214 config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
247 215
248 216 def wrap_config_celery():
249 217 configure_celery(config.registry)
250 218
251 219 config.action(None, wrap_config_celery, order=PHASE3_CONFIG + 999)
252 220
253 221 app = config.make_wsgi_app()
254 222 return app
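A quick illustration of the two `pyramid.settings` helpers that `main()` relies on when reading the ini file: `asbool()` normalizes ini-style boolean strings and `aslist()` splits whitespace-separated values such as `elasticsearch.nodes`. The settings values below are hypothetical:

    from pyramid.settings import asbool, aslist

    settings = {  # hypothetical ini values, for illustration only
        'appenlight.disable_registration': 'true',
        'elasticsearch.nodes': 'http://127.0.0.1:9200 http://127.0.0.2:9200',
    }
    assert asbool(settings['appenlight.disable_registration']) is True
    assert aslist(settings['elasticsearch.nodes']) == [
        'http://127.0.0.1:9200', 'http://127.0.0.2:9200']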
@@ -1,181 +1,171 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18
19 19 from datetime import timedelta
20 20 from celery import Celery
21 21 from celery.bin import Option
22 22 from celery.schedules import crontab
23 23 from celery.signals import worker_init, task_revoked, user_preload_options
24 24 from celery.signals import task_prerun, task_retry, task_failure, task_success
25 25 from kombu.serialization import register
26 26 from pyramid.paster import bootstrap
27 27 from pyramid.request import Request
28 28 from pyramid.scripting import prepare
29 29 from pyramid.settings import asbool
30 30 from pyramid.threadlocal import get_current_request
31 31
32 32 from appenlight.celery.encoders import json_dumps, json_loads
33 33 from appenlight_client.ext.celery import register_signals
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 register(
38 "date_json",
39 json_dumps,
40 json_loads,
41 content_type="application/x-date_json",
42 content_encoding="utf-8",
43 )
37 register('date_json', json_dumps, json_loads,
38 content_type='application/x-date_json',
39 content_encoding='utf-8')
44 40
45 41 celery = Celery()
46 42
47 celery.user_options["preload"].add(
48 Option(
49 "--ini",
50 dest="ini",
51 default=None,
52 help="Specifies pyramid configuration file location.",
53 )
43 celery.user_options['preload'].add(
44 Option('--ini', dest='ini', default=None,
45 help='Specifies pyramid configuration file location.')
54 46 )
55 47
56 48
57 49 @user_preload_options.connect
58 50 def on_preload_parsed(options, **kwargs):
59 51 """
60 52 This actually configures celery from pyramid config file
61 53 """
62 celery.conf["INI_PYRAMID"] = options["ini"]
54 celery.conf['INI_PYRAMID'] = options['ini']
63 55 import appenlight_client.client as e_client
64
65 ini_location = options["ini"]
56 ini_location = options['ini']
66 57 if not ini_location:
67 raise Exception(
68 "You need to pass pyramid ini location using "
69 "--ini=filename.ini argument to the worker"
70 )
71 env = bootstrap(ini_location[0])
72 api_key = env["request"].registry.settings["appenlight.api_key"]
73 tr_config = env["request"].registry.settings.get("appenlight.transport_config")
74 CONFIG = e_client.get_config({"appenlight.api_key": api_key})
58 raise Exception('You need to pass pyramid ini location using '
59 '--ini=filename.ini argument to the worker')
60 env = bootstrap(ini_location)
61 api_key = env['request'].registry.settings['appenlight.api_key']
62 tr_config = env['request'].registry.settings.get(
63 'appenlight.transport_config')
64 CONFIG = e_client.get_config({'appenlight.api_key': api_key})
75 65 if tr_config:
76 CONFIG["appenlight.transport_config"] = tr_config
66 CONFIG['appenlight.transport_config'] = tr_config
77 67 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
78 68 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
79 69 register_signals(APPENLIGHT_CLIENT)
80 70 celery.pyramid = env
81 71
82 72
83 73 celery_config = {
84 "CELERY_IMPORTS": ["appenlight.celery.tasks"],
85 "CELERYD_TASK_TIME_LIMIT": 60,
86 "CELERYD_MAX_TASKS_PER_CHILD": 1000,
87 "CELERY_IGNORE_RESULT": True,
88 "CELERY_ACCEPT_CONTENT": ["date_json"],
89 "CELERY_TASK_SERIALIZER": "date_json",
90 "CELERY_RESULT_SERIALIZER": "date_json",
91 "BROKER_URL": None,
92 "CELERYD_CONCURRENCY": None,
93 "CELERY_TIMEZONE": None,
94 "CELERYBEAT_SCHEDULE": {
95 "alerting_reports": {
96 "task": "appenlight.celery.tasks.alerting_reports",
97 "schedule": timedelta(seconds=60),
74 'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
75 'CELERYD_TASK_TIME_LIMIT': 60,
76 'CELERYD_MAX_TASKS_PER_CHILD': 1000,
77 'CELERY_IGNORE_RESULT': True,
78 'CELERY_ACCEPT_CONTENT': ['date_json'],
79 'CELERY_TASK_SERIALIZER': 'date_json',
80 'CELERY_RESULT_SERIALIZER': 'date_json',
81 'BROKER_URL': None,
82 'CELERYD_CONCURRENCY': None,
83 'CELERY_TIMEZONE': None,
84 'CELERYBEAT_SCHEDULE': {
85 'alerting_reports': {
86 'task': 'appenlight.celery.tasks.alerting_reports',
87 'schedule': timedelta(seconds=60)
98 88 },
99 "close_alerts": {
100 "task": "appenlight.celery.tasks.close_alerts",
101 "schedule": timedelta(seconds=60),
102 },
103 },
89 'close_alerts': {
90 'task': 'appenlight.celery.tasks.close_alerts',
91 'schedule': timedelta(seconds=60)
92 }
93 }
104 94 }
105 95 celery.config_from_object(celery_config)
106 96
107 97
108 98 def configure_celery(pyramid_registry):
109 99 settings = pyramid_registry.settings
110 celery_config["BROKER_URL"] = settings["celery.broker_url"]
111 celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
112 celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]
100 celery_config['BROKER_URL'] = settings['celery.broker_url']
101 celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
102 celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
113 103
114 notifications_seconds = int(
115 settings.get("tasks.notifications_reports.interval", 60)
116 )
104 notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
117 105
118 celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
119 "task": "appenlight.celery.tasks.notifications_reports",
120 "schedule": timedelta(seconds=notifications_seconds),
106 celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
107 'task': 'appenlight.celery.tasks.notifications_reports',
108 'schedule': timedelta(seconds=notifications_seconds)
121 109 }
122 110
123 celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
124 "task": "appenlight.celery.tasks.daily_digest",
125 "schedule": crontab(minute=1, hour="4,12,20"),
111 celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
112 'task': 'appenlight.celery.tasks.daily_digest',
113 'schedule': crontab(minute=1, hour='4,12,20')
126 114 }
127 115
128 if asbool(settings.get("celery.always_eager")):
129 celery_config["CELERY_ALWAYS_EAGER"] = True
130 celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True
116 if asbool(settings.get('celery.always_eager')):
117 celery_config['CELERY_ALWAYS_EAGER'] = True
118 celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
131 119
132 120 for plugin in pyramid_registry.appenlight_plugins.values():
133 if plugin.get("celery_tasks"):
134 celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
135 if plugin.get("celery_beats"):
136 for name, config in plugin["celery_beats"]:
137 celery_config["CELERYBEAT_SCHEDULE"][name] = config
121 if plugin.get('celery_tasks'):
122 celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
123 if plugin.get('celery_beats'):
124 for name, config in plugin['celery_beats']:
125 celery_config['CELERYBEAT_SCHEDULE'][name] = config
138 126 celery.config_from_object(celery_config)
139 127
140 128
141 129 @task_prerun.connect
142 130 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
143 if hasattr(celery, "pyramid"):
131 if hasattr(celery, 'pyramid'):
144 132 env = celery.pyramid
145 env = prepare(registry=env["request"].registry)
146 proper_base_url = env["request"].registry.settings["mailing.app_url"]
147 tmp_req = Request.blank("/", base_url=proper_base_url)
133 env = prepare(registry=env['request'].registry)
134 proper_base_url = env['request'].registry.settings['mailing.app_url']
135 tmp_req = Request.blank('/', base_url=proper_base_url)
148 136 # ensure tasks generate url for right domain from config
149 env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
150 env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
151 env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
152 env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
137 env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
138 env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
139 env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
140 env['request'].environ['wsgi.url_scheme'] = \
141 tmp_req.environ['wsgi.url_scheme']
153 142 get_current_request().tm.begin()
154 143
155 144
156 145 @task_success.connect
157 146 def task_success_signal(result, **kwargs):
158 147 get_current_request().tm.commit()
159 if hasattr(celery, "pyramid"):
148 if hasattr(celery, 'pyramid'):
160 149 celery.pyramid["closer"]()
161 150
162 151
163 152 @task_retry.connect
164 153 def task_retry_signal(request, reason, einfo, **kwargs):
165 154 get_current_request().tm.abort()
166 if hasattr(celery, "pyramid"):
155 if hasattr(celery, 'pyramid'):
167 156 celery.pyramid["closer"]()
168 157
169 158
170 159 @task_failure.connect
171 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
160 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
161 **kwaargs):
172 162 get_current_request().tm.abort()
173 if hasattr(celery, "pyramid"):
163 if hasattr(celery, 'pyramid'):
174 164 celery.pyramid["closer"]()
175 165
176 166
177 167 @task_revoked.connect
178 168 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
179 169 get_current_request().tm.abort()
180 if hasattr(celery, "pyramid"):
170 if hasattr(celery, 'pyramid'):
181 171 celery.pyramid["closer"]()
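`configure_celery()` above also merges per-plugin task modules and beat schedules into `celery_config`. A sketch of the plugin metadata shape that loop expects, with hypothetical module and task names modeled on the `ae_uptime_ce` plugin mentioned in the README:

    from datetime import timedelta

    # Shape assumed from the loop in configure_celery(); names hypothetical.
    plugin = {
        'celery_tasks': ['ae_uptime_ce.celery.tasks'],
        'celery_beats': [
            ('uptime_check', {
                'task': 'ae_uptime_ce.celery.tasks.check_uptime',
                'schedule': timedelta(seconds=60),
            }),
        ],
    }

    celery_config = {'CELERY_IMPORTS': [], 'CELERYBEAT_SCHEDULE': {}}
    if plugin.get('celery_tasks'):
        celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
    if plugin.get('celery_beats'):
        for name, config in plugin['celery_beats']:
            celery_config['CELERYBEAT_SCHEDULE'][name] = config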
@@ -1,51 +1,60 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import json
18 18 from datetime import datetime, date, timedelta
19 19
20 DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
20 DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
21 21
22 22
23 23 class DateEncoder(json.JSONEncoder):
24 24 def default(self, obj):
25 25 if isinstance(obj, datetime):
26 return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
26 return {
27 '__type__': '__datetime__',
28 'iso': obj.strftime(DATE_FORMAT)
29 }
27 30 elif isinstance(obj, date):
28 return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
31 return {
32 '__type__': '__date__',
33 'iso': obj.strftime(DATE_FORMAT)
34 }
29 35 elif isinstance(obj, timedelta):
30 return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
36 return {
37 '__type__': '__timedelta__',
38 'seconds': obj.total_seconds()
39 }
31 40 else:
32 41 return json.JSONEncoder.default(self, obj)
33 42
34 43
35 44 def date_decoder(dct):
36 if "__type__" in dct:
37 if dct["__type__"] == "__datetime__":
38 return datetime.strptime(dct["iso"], DATE_FORMAT)
39 elif dct["__type__"] == "__date__":
40 return datetime.strptime(dct["iso"], DATE_FORMAT).date()
41 elif dct["__type__"] == "__timedelta__":
42 return timedelta(seconds=dct["seconds"])
45 if '__type__' in dct:
46 if dct['__type__'] == '__datetime__':
47 return datetime.strptime(dct['iso'], DATE_FORMAT)
48 elif dct['__type__'] == '__date__':
49 return datetime.strptime(dct['iso'], DATE_FORMAT).date()
50 elif dct['__type__'] == '__timedelta__':
51 return timedelta(seconds=dct['seconds'])
43 52 return dct
44 53
45 54
46 55 def json_dumps(obj):
47 56 return json.dumps(obj, cls=DateEncoder)
48 57
49 58
50 59 def json_loads(obj):
51 return json.loads(obj.decode("utf8"), object_hook=date_decoder)
60 return json.loads(obj.decode('utf8'), object_hook=date_decoder)
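The encoder pair above is registered with kombu as the `date_json` serializer (see `appenlight/celery/__init__.py`), which is what lets celery messages carry datetime-like values. A minimal round-trip sketch, assuming the appenlight package is importable:

    from datetime import datetime, timedelta

    from appenlight.celery.encoders import json_dumps, json_loads

    payload = {'when': datetime(2016, 6, 29, 12, 0, 0),
               'ttl': timedelta(seconds=90)}
    raw = json_dumps(payload)                  # str with __type__ markers
    restored = json_loads(raw.encode('utf8'))  # json_loads expects bytes
    assert restored == payload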
@@ -1,705 +1,660 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import bisect
18 18 import collections
19 19 import math
20 20 from datetime import datetime, timedelta
21 21
22 22 import sqlalchemy as sa
23 import elasticsearch.exceptions
24 import elasticsearch.helpers
23 import pyelasticsearch
25 24
26 25 from celery.utils.log import get_task_logger
27 26 from zope.sqlalchemy import mark_changed
28 27 from pyramid.threadlocal import get_current_request, get_current_registry
29 from ziggurat_foundations.models.services.resource import ResourceService
30
31 28 from appenlight.celery import celery
32 29 from appenlight.models.report_group import ReportGroup
33 30 from appenlight.models import DBSession, Datastores
34 31 from appenlight.models.report import Report
35 32 from appenlight.models.log import Log
36 33 from appenlight.models.metric import Metric
37 34 from appenlight.models.event import Event
38 35
39 36 from appenlight.models.services.application import ApplicationService
40 37 from appenlight.models.services.event import EventService
41 38 from appenlight.models.services.log import LogService
42 39 from appenlight.models.services.report import ReportService
43 40 from appenlight.models.services.report_group import ReportGroupService
44 41 from appenlight.models.services.user import UserService
45 42 from appenlight.models.tag import Tag
46 43 from appenlight.lib import print_traceback
47 44 from appenlight.lib.utils import parse_proto, in_batches
48 45 from appenlight.lib.ext_json import json
49 46 from appenlight.lib.redis_keys import REDIS_KEYS
50 47 from appenlight.lib.enums import ReportType
51 48
52 49 log = get_task_logger(__name__)
53 50
54 sample_boundries = (
55 list(range(100, 1000, 100))
56 + list(range(1000, 10000, 1000))
57 + list(range(10000, 100000, 5000))
58 )
51 sample_boundries = list(range(100, 1000, 100)) + \
52 list(range(1000, 10000, 1000)) + \
53 list(range(10000, 100000, 5000))
59 54
60 55
61 56 def pick_sample(total_occurences, report_type=None):
62 57 every = 1.0
63 58 position = bisect.bisect_left(sample_boundries, total_occurences)
64 59 if position > 0:
65 60 if report_type == ReportType.not_found:
66 61 divide = 10.0
67 62 else:
68 63 divide = 100.0
69 64 every = sample_boundries[position - 1] / divide
70 65 return total_occurences % every == 0
71 66
72 67
73 68 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
74 69 def test_exception_task():
75 log.error("test celery log", extra={"location": "celery"})
76 log.warning("test celery log", extra={"location": "celery"})
77 raise Exception("Celery exception test")
70 log.error('test celery log', extra={'location': 'celery'})
71 log.warning('test celery log', extra={'location': 'celery'})
72 raise Exception('Celery exception test')
78 73
79 74
80 75 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
81 76 def test_retry_exception_task():
82 77 try:
83 78 import time
84 79
85 80 time.sleep(1.3)
86 log.error("test retry celery log", extra={"location": "celery"})
87 log.warning("test retry celery log", extra={"location": "celery"})
88 raise Exception("Celery exception test")
81 log.error('test retry celery log', extra={'location': 'celery'})
82 log.warning('test retry celery log', extra={'location': 'celery'})
83 raise Exception('Celery exception test')
89 84 except Exception as exc:
90 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
91 raise
92 85 test_retry_exception_task.retry(exc=exc)
93 86
94 87
95 88 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
96 89 def add_reports(resource_id, request_params, dataset, **kwargs):
97 proto_version = parse_proto(request_params.get("protocol_version", ""))
90 proto_version = parse_proto(request_params.get('protocol_version', ''))
98 91 current_time = datetime.utcnow().replace(second=0, microsecond=0)
99 92 try:
100 93 # we will store es docs here for single insert
101 94 es_report_docs = {}
102 95 es_report_group_docs = {}
103 96 resource = ApplicationService.by_id(resource_id)
104 97
105 98 tags = []
106 99 es_slow_calls_docs = {}
107 100 es_reports_stats_rows = {}
108 101 for report_data in dataset:
109 102 # build report details for later
110 103 added_details = 0
111 104 report = Report()
112 105 report.set_data(report_data, resource, proto_version)
113 106 report._skip_ft_index = True
114 107
115 108 # find latest group in this month's partition
116 109 report_group = ReportGroupService.by_hash_and_resource(
117 110 report.resource_id,
118 111 report.grouping_hash,
119 since_when=datetime.utcnow().date().replace(day=1),
112 since_when=datetime.utcnow().date().replace(day=1)
120 113 )
121 occurences = report_data.get("occurences", 1)
114 occurences = report_data.get('occurences', 1)
122 115 if not report_group:
123 116 # total reports will be +1 a moment later
124 report_group = ReportGroup(
125 grouping_hash=report.grouping_hash,
126 occurences=0,
127 total_reports=0,
128 last_report=0,
129 priority=report.priority,
130 error=report.error,
131 first_timestamp=report.start_time,
132 )
117 report_group = ReportGroup(grouping_hash=report.grouping_hash,
118 occurences=0, total_reports=0,
119 last_report=0,
120 priority=report.priority,
121 error=report.error,
122 first_timestamp=report.start_time)
133 123 report_group._skip_ft_index = True
134 124 report_group.report_type = report.report_type
135 125 report.report_group_time = report_group.first_timestamp
136 add_sample = pick_sample(
137 report_group.occurences, report_type=report_group.report_type
138 )
126 add_sample = pick_sample(report_group.occurences,
127 report_type=report_group.report_type)
139 128 if add_sample:
140 129 resource.report_groups.append(report_group)
141 130 report_group.reports.append(report)
142 131 added_details += 1
143 132 DBSession.flush()
144 133 if report.partition_id not in es_report_docs:
145 134 es_report_docs[report.partition_id] = []
146 135 es_report_docs[report.partition_id].append(report.es_doc())
147 136 tags.extend(list(report.tags.items()))
148 137 slow_calls = report.add_slow_calls(report_data, report_group)
149 138 DBSession.flush()
150 139 for s_call in slow_calls:
151 140 if s_call.partition_id not in es_slow_calls_docs:
152 141 es_slow_calls_docs[s_call.partition_id] = []
153 es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc())
142 es_slow_calls_docs[s_call.partition_id].append(
143 s_call.es_doc())
154 144 # try generating new stat rows if needed
155 145 else:
156 146 # required for postprocessing to not fail later
157 147 report.report_group = report_group
158 148
159 stat_row = ReportService.generate_stat_rows(report, resource, report_group)
149 stat_row = ReportService.generate_stat_rows(
150 report, resource, report_group)
160 151 if stat_row.partition_id not in es_reports_stats_rows:
161 152 es_reports_stats_rows[stat_row.partition_id] = []
162 es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc())
153 es_reports_stats_rows[stat_row.partition_id].append(
154 stat_row.es_doc())
163 155
164 156 # see if we should mark 10th occurrence of report
165 157 last_occurences_10 = int(math.floor(report_group.occurences / 10))
166 curr_occurences_10 = int(
167 math.floor((report_group.occurences + report.occurences) / 10)
168 )
169 last_occurences_100 = int(math.floor(report_group.occurences / 100))
170 curr_occurences_100 = int(
171 math.floor((report_group.occurences + report.occurences) / 100)
172 )
158 curr_occurences_10 = int(math.floor(
159 (report_group.occurences + report.occurences) / 10))
160 last_occurences_100 = int(
161 math.floor(report_group.occurences / 100))
162 curr_occurences_100 = int(math.floor(
163 (report_group.occurences + report.occurences) / 100))
173 164 notify_occurences_10 = last_occurences_10 != curr_occurences_10
174 165 notify_occurences_100 = last_occurences_100 != curr_occurences_100
175 166 report_group.occurences = ReportGroup.occurences + occurences
176 167 report_group.last_timestamp = report.start_time
177 168 report_group.summed_duration = ReportGroup.summed_duration + report.duration
178 169 summed_duration = ReportGroup.summed_duration + report.duration
179 170 summed_occurences = ReportGroup.occurences + occurences
180 171 report_group.average_duration = summed_duration / summed_occurences
181 172 report_group.run_postprocessing(report)
182 173 if added_details:
183 174 report_group.total_reports = ReportGroup.total_reports + 1
184 175 report_group.last_report = report.id
185 report_group.set_notification_info(
186 notify_10=notify_occurences_10, notify_100=notify_occurences_100
187 )
176 report_group.set_notification_info(notify_10=notify_occurences_10,
177 notify_100=notify_occurences_100)
188 178 DBSession.flush()
189 179 report_group.get_report().notify_channel(report_group)
190 180 if report_group.partition_id not in es_report_group_docs:
191 181 es_report_group_docs[report_group.partition_id] = []
192 182 es_report_group_docs[report_group.partition_id].append(
193 report_group.es_doc()
194 )
183 report_group.es_doc())
195 184
196 action = "REPORT"
197 log_msg = "%s: %s %s, client: %s, proto: %s" % (
185 action = 'REPORT'
186 log_msg = '%s: %s %s, client: %s, proto: %s' % (
198 187 action,
199 report_data.get("http_status", "unknown"),
188 report_data.get('http_status', 'unknown'),
200 189 str(resource),
201 report_data.get("client"),
202 proto_version,
203 )
190 report_data.get('client'),
191 proto_version)
204 192 log.info(log_msg)
205 193 total_reports = len(dataset)
206 194 redis_pipeline = Datastores.redis.pipeline(transaction=False)
207 key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
195 key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
208 196 redis_pipeline.incr(key, total_reports)
209 197 redis_pipeline.expire(key, 3600 * 24)
210 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
211 resource.owner_user_id, current_time
212 )
198 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
199 resource.owner_user_id, current_time)
213 200 redis_pipeline.incr(key, total_reports)
214 201 redis_pipeline.expire(key, 3600)
215 key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
216 resource_id, current_time.replace(minute=0)
217 )
202 key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
203 resource_id, current_time.replace(minute=0))
218 204 redis_pipeline.incr(key, total_reports)
219 205 redis_pipeline.expire(key, 3600 * 24 * 7)
220 206 redis_pipeline.sadd(
221 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
222 current_time.replace(minute=0)
223 ),
224 resource_id,
225 )
207 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
208 current_time.replace(minute=0)), resource_id)
226 209 redis_pipeline.execute()
227 210
228 211 add_reports_es(es_report_group_docs, es_report_docs)
229 212 add_reports_slow_calls_es(es_slow_calls_docs)
230 213 add_reports_stats_rows_es(es_reports_stats_rows)
231 214 return True
232 215 except Exception as exc:
233 216 print_traceback(log)
234 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
235 raise
236 217 add_reports.retry(exc=exc)
237 218
238 219
239 220 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
240 221 def add_reports_es(report_group_docs, report_docs):
241 222 for k, v in report_group_docs.items():
242 to_update = {"_index": k, "_type": "report"}
243 [i.update(to_update) for i in v]
244 elasticsearch.helpers.bulk(Datastores.es, v)
223 Datastores.es.bulk_index(k, 'report_group', v, id_field="_id")
245 224 for k, v in report_docs.items():
246 to_update = {"_index": k, "_type": "report"}
247 [i.update(to_update) for i in v]
248 elasticsearch.helpers.bulk(Datastores.es, v)
225 Datastores.es.bulk_index(k, 'report', v, id_field="_id",
226 parent_field='_parent')
249 227
250 228
251 229 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
252 230 def add_reports_slow_calls_es(es_docs):
253 231 for k, v in es_docs.items():
254 to_update = {"_index": k, "_type": "log"}
255 [i.update(to_update) for i in v]
256 elasticsearch.helpers.bulk(Datastores.es, v)
232 Datastores.es.bulk_index(k, 'log', v)
257 233
258 234
259 235 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
260 236 def add_reports_stats_rows_es(es_docs):
261 237 for k, v in es_docs.items():
262 to_update = {"_index": k, "_type": "report"}
263 [i.update(to_update) for i in v]
264 elasticsearch.helpers.bulk(Datastores.es, v)
238 Datastores.es.bulk_index(k, 'log', v)
265 239
266 240
267 241 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
268 242 def add_logs(resource_id, request_params, dataset, **kwargs):
269 proto_version = request_params.get("protocol_version")
243 proto_version = request_params.get('protocol_version')
270 244 current_time = datetime.utcnow().replace(second=0, microsecond=0)
271 245
272 246 try:
273 247 es_docs = collections.defaultdict(list)
274 248 resource = ApplicationService.by_id_cached()(resource_id)
275 249 resource = DBSession.merge(resource, load=False)
276 250 ns_pairs = []
277 251 for entry in dataset:
278 252 # gather pk and ns so we can remove older versions of row later
279 if entry["primary_key"] is not None:
280 ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]})
253 if entry['primary_key'] is not None:
254 ns_pairs.append({"pk": entry['primary_key'],
255 "ns": entry['namespace']})
281 256 log_entry = Log()
282 257 log_entry.set_data(entry, resource=resource)
283 258 log_entry._skip_ft_index = True
284 259 resource.logs.append(log_entry)
285 260 DBSession.flush()
286 261 # insert non pk rows first
287 if entry["primary_key"] is None:
262 if entry['primary_key'] is None:
288 263 es_docs[log_entry.partition_id].append(log_entry.es_doc())
289 264
290 # 2nd pass to delete all log entries from db for same pk/ns pair
265 # 2nd pass to delete all log entries from db for same pk/ns pair
291 266 if ns_pairs:
292 267 ids_to_delete = []
293 268 es_docs = collections.defaultdict(list)
294 269 es_docs_to_delete = collections.defaultdict(list)
295 270 found_pkey_logs = LogService.query_by_primary_key_and_namespace(
296 list_of_pairs=ns_pairs
297 )
271 list_of_pairs=ns_pairs)
298 272 log_dict = {}
299 273 for log_entry in found_pkey_logs:
300 274 log_key = (log_entry.primary_key, log_entry.namespace)
301 275 if log_key not in log_dict:
302 276 log_dict[log_key] = []
303 277 log_dict[log_key].append(log_entry)
304 278
305 279 for ns, entry_list in log_dict.items():
306 280 entry_list = sorted(entry_list, key=lambda x: x.timestamp)
307 281 # newest row needs to be indexed in es
308 282 log_entry = entry_list[-1]
309 283 # delete everything from pg and ES, leave the last row in pg
310 284 for e in entry_list[:-1]:
311 285 ids_to_delete.append(e.log_id)
312 286 es_docs_to_delete[e.partition_id].append(e.delete_hash)
313 287
314 es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash)
288 es_docs_to_delete[log_entry.partition_id].append(
289 log_entry.delete_hash)
315 290
316 291 es_docs[log_entry.partition_id].append(log_entry.es_doc())
317 292
318 293 if ids_to_delete:
319 query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete))
294 query = DBSession.query(Log).filter(
295 Log.log_id.in_(ids_to_delete))
320 296 query.delete(synchronize_session=False)
321 297 if es_docs_to_delete:
322 298 # batch this to avoid problems with default ES bulk limits
323 299 for es_index in es_docs_to_delete.keys():
324 300 for batch in in_batches(es_docs_to_delete[es_index], 20):
325 query = {"query": {"terms": {"delete_hash": batch}}}
301 query = {'terms': {'delete_hash': batch}}
326 302
327 303 try:
328 304 Datastores.es.delete_by_query(
329 index=es_index,
330 doc_type="log",
331 body=query,
332 conflicts="proceed",
333 )
334 except elasticsearch.exceptions.NotFoundError as exc:
335 msg = "skipping index {}".format(es_index)
305 es_index, 'log', query)
306 except pyelasticsearch.ElasticHttpNotFoundError as exc:
307 msg = 'skipping index {}'.format(es_index)
336 308 log.info(msg)
337 309
338 310 total_logs = len(dataset)
339 311
340 log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
312 log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
341 313 str(resource),
342 314 total_logs,
343 proto_version,
344 )
315 proto_version)
345 316 log.info(log_msg)
346 317 # mark_changed(session)
347 318 redis_pipeline = Datastores.redis.pipeline(transaction=False)
348 key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
319 key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
349 320 redis_pipeline.incr(key, total_logs)
350 321 redis_pipeline.expire(key, 3600 * 24)
351 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
352 resource.owner_user_id, current_time
353 )
322 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
323 resource.owner_user_id, current_time)
354 324 redis_pipeline.incr(key, total_logs)
355 325 redis_pipeline.expire(key, 3600)
356 key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
357 resource_id, current_time.replace(minute=0)
358 )
326 key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format(
327 resource_id, current_time.replace(minute=0))
359 328 redis_pipeline.incr(key, total_logs)
360 329 redis_pipeline.expire(key, 3600 * 24 * 7)
361 330 redis_pipeline.sadd(
362 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
363 current_time.replace(minute=0)
364 ),
365 resource_id,
366 )
331 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
332 current_time.replace(minute=0)), resource_id)
367 333 redis_pipeline.execute()
368 334 add_logs_es(es_docs)
369 335 return True
370 336 except Exception as exc:
371 337 print_traceback(log)
372 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
373 raise
374 338 add_logs.retry(exc=exc)
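The second pass above pushes delete_hash values through the in_batches() helper so each terms query stays under the default ES bulk limits. in_batches() is an appenlight utility defined elsewhere; a hypothetical equivalent chunker would look like this:

    def in_batches(seq, batch_size):
        # Yield consecutive slices of seq, batch_size items at a time.
        for i in range(0, len(seq), batch_size):
            yield seq[i:i + batch_size]

    list(in_batches(["h1", "h2", "h3", "h4", "h5"], 2))
    # [['h1', 'h2'], ['h3', 'h4'], ['h5']]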
375 339
376 340
377 341 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
378 342 def add_logs_es(es_docs):
379 343 for k, v in es_docs.items():
380 to_update = {"_index": k, "_type": "log"}
381 [i.update(to_update) for i in v]
382 elasticsearch.helpers.bulk(Datastores.es, v)
344 Datastores.es.bulk_index(k, 'log', v)
383 345
384 346
385 347 @celery.task(queue="metrics", default_retry_delay=600, max_retries=144)
386 348 def add_metrics(resource_id, request_params, dataset, proto_version):
387 349 current_time = datetime.utcnow().replace(second=0, microsecond=0)
388 350 try:
389 351 resource = ApplicationService.by_id_cached()(resource_id)
390 352 resource = DBSession.merge(resource, load=False)
391 353 es_docs = []
392 354 rows = []
393 355 for metric in dataset:
394 tags = dict(metric["tags"])
395 server_n = tags.get("server_name", metric["server_name"]).lower()
396 tags["server_name"] = server_n or "unknown"
356 tags = dict(metric['tags'])
357 server_n = tags.get('server_name', metric['server_name']).lower()
358 tags['server_name'] = server_n or 'unknown'
397 359 new_metric = Metric(
398 timestamp=metric["timestamp"],
360 timestamp=metric['timestamp'],
399 361 resource_id=resource.resource_id,
400 namespace=metric["namespace"],
401 tags=tags,
402 )
362 namespace=metric['namespace'],
363 tags=tags)
403 364 rows.append(new_metric)
404 365 es_docs.append(new_metric.es_doc())
405 366 session = DBSession()
406 367 session.bulk_save_objects(rows)
407 368 session.flush()
408 369
409 action = "METRICS"
410 metrics_msg = "%s: %s, metrics: %s, proto:%s" % (
370 action = 'METRICS'
371 metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
411 372 action,
412 373 str(resource),
413 374 len(dataset),
414 proto_version,
375 proto_version
415 376 )
416 377 log.info(metrics_msg)
417 378
418 379 mark_changed(session)
419 380 redis_pipeline = Datastores.redis.pipeline(transaction=False)
420 key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
381 key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
421 382 redis_pipeline.incr(key, len(rows))
422 383 redis_pipeline.expire(key, 3600 * 24)
423 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
424 resource.owner_user_id, current_time
425 )
384 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
385 resource.owner_user_id, current_time)
426 386 redis_pipeline.incr(key, len(rows))
427 387 redis_pipeline.expire(key, 3600)
428 key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format(
429 resource_id, current_time.replace(minute=0)
430 )
388 key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format(
389 resource_id, current_time.replace(minute=0))
431 390 redis_pipeline.incr(key, len(rows))
432 391 redis_pipeline.expire(key, 3600 * 24 * 7)
433 392 redis_pipeline.sadd(
434 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
435 current_time.replace(minute=0)
436 ),
437 resource_id,
438 )
393 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
394 current_time.replace(minute=0)), resource_id)
439 395 redis_pipeline.execute()
440 396 add_metrics_es(es_docs)
441 397 return True
442 398 except Exception as exc:
443 399 print_traceback(log)
444 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
445 raise
446 400 add_metrics.retry(exc=exc)
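Both ingestion tasks end with the same Redis bookkeeping: a non-transactional pipeline bumps a time-bucketed counter and refreshes its TTL in a single round trip. A standalone sketch of the idiom with an illustrative key shape (redis-py API):

    import redis

    r = redis.StrictRedis()  # stand-in for Datastores.redis
    pipe = r.pipeline(transaction=False)
    key = "metrics_per_minute:2018-10-13T19:27:00"  # illustrative key shape
    pipe.incr(key, 10)           # add this batch's row count
    pipe.expire(key, 3600 * 24)  # per-minute buckets live for a day
    pipe.execute()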
447 401
448 402
449 403 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
450 404 def add_metrics_es(es_docs):
451 405 for doc in es_docs:
452 partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
453 Datastores.es.index(partition, "log", doc)
406 partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d')
407 Datastores.es.index(partition, 'log', doc)
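The per-day metric partitions are derived purely from each document's timestamp, for example:

    from datetime import datetime

    doc = {"timestamp": datetime(2018, 10, 13, 19, 27)}
    partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
    print(partition)  # rcae_m_2018_10_13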
454 408
455 409
456 410 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
457 411 def check_user_report_notifications(resource_id):
458 412 since_when = datetime.utcnow()
459 413 try:
460 414 request = get_current_request()
461 415 application = ApplicationService.by_id(resource_id)
462 416 if not application:
463 417 return
464 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
465 ReportType.error, resource_id
466 )
467 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
468 ReportType.slow, resource_id
469 )
418 error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
419 ReportType.error, resource_id)
420 slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
421 ReportType.slow, resource_id)
470 422 error_group_ids = Datastores.redis.smembers(error_key)
471 423 slow_group_ids = Datastores.redis.smembers(slow_key)
472 424 Datastores.redis.delete(error_key)
473 425 Datastores.redis.delete(slow_key)
474 426 err_gids = [int(g_id) for g_id in error_group_ids]
475 427 slow_gids = [int(g_id) for g_id in list(slow_group_ids)]
476 428 group_ids = err_gids + slow_gids
477 429 occurence_dict = {}
478 430 for g_id in group_ids:
479 key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id)
431 key = REDIS_KEYS['counters']['report_group_occurences'].format(
432 g_id)
480 433 val = Datastores.redis.get(key)
481 434 Datastores.redis.delete(key)
482 435 if val:
483 436 occurence_dict[g_id] = int(val)
484 437 else:
485 438 occurence_dict[g_id] = 1
486 439 report_groups = ReportGroupService.by_ids(group_ids)
487 440 report_groups = report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
488 441
489 442 ApplicationService.check_for_groups_alert(
490 application,
491 "alert",
492 report_groups=report_groups,
493 occurence_dict=occurence_dict,
494 )
495 users = set(
496 [p.user for p in ResourceService.users_for_perm(application, "view")]
497 )
443 application, 'alert', report_groups=report_groups,
444 occurence_dict=occurence_dict)
445 users = set([p.user for p in application.users_for_perm('view')])
498 446 report_groups = report_groups.all()
499 447 for user in users:
500 UserService.report_notify(
501 user,
502 request,
503 application,
504 report_groups=report_groups,
505 occurence_dict=occurence_dict,
506 )
448 UserService.report_notify(user, request, application,
449 report_groups=report_groups,
450 occurence_dict=occurence_dict)
507 451 for group in report_groups:
508 452 # marks report_groups as notified
509 453 if not group.notified:
510 454 group.notified = True
511 455 except Exception as exc:
512 456 print_traceback(log)
513 457 raise
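The handshake above drains a Redis set of pending report-group ids and then deletes the key; since smembers() and delete() are separate commands, ids added in between can be missed for that round. A reduced sketch (key name illustrative):

    import redis

    r = redis.StrictRedis()
    error_key = "reports_to_notify:error:some-app"  # illustrative key
    group_ids = [int(g_id) for g_id in r.smembers(error_key)]  # bytes -> int
    r.delete(error_key)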
514 458
515 459
516 460 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
517 461 def check_alerts(resource_id):
518 462 since_when = datetime.utcnow()
519 463 try:
520 464 request = get_current_request()
521 465 application = ApplicationService.by_id(resource_id)
522 466 if not application:
523 467 return
524 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
525 ReportType.error, resource_id
526 )
527 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
528 ReportType.slow, resource_id
529 )
468 error_key = REDIS_KEYS[
469 'reports_to_notify_per_type_per_app_alerting'].format(
470 ReportType.error, resource_id)
471 slow_key = REDIS_KEYS[
472 'reports_to_notify_per_type_per_app_alerting'].format(
473 ReportType.slow, resource_id)
530 474 error_group_ids = Datastores.redis.smembers(error_key)
531 475 slow_group_ids = Datastores.redis.smembers(slow_key)
532 476 Datastores.redis.delete(error_key)
533 477 Datastores.redis.delete(slow_key)
534 478 err_gids = [int(g_id) for g_id in error_group_ids]
535 479 slow_gids = [int(g_id) for g_id in list(slow_group_ids)]
536 480 group_ids = err_gids + slow_gids
537 481 occurence_dict = {}
538 482 for g_id in group_ids:
539 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(
540 g_id
541 )
483 key = REDIS_KEYS['counters'][
484 'report_group_occurences_alerting'].format(
485 g_id)
542 486 val = Datastores.redis.get(key)
543 487 Datastores.redis.delete(key)
544 488 if val:
545 489 occurence_dict[g_id] = int(val)
546 490 else:
547 491 occurence_dict[g_id] = 1
548 492 report_groups = ReportGroupService.by_ids(group_ids)
549 493 report_groups = report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
550 494
551 495 ApplicationService.check_for_groups_alert(
552 application,
553 "alert",
554 report_groups=report_groups,
555 occurence_dict=occurence_dict,
556 since_when=since_when,
557 )
496 application, 'alert', report_groups=report_groups,
497 occurence_dict=occurence_dict, since_when=since_when)
558 498 except Exception as exc:
559 499 print_traceback(log)
560 500 raise
561 501
562 502
563 503 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
564 504 def close_alerts():
565 log.warning("Checking alerts")
505 log.warning('Checking alerts')
566 506 since_when = datetime.utcnow()
567 507 try:
568 event_types = [
569 Event.types["error_report_alert"],
570 Event.types["slow_report_alert"],
571 ]
572 statuses = [Event.statuses["active"]]
508 event_types = [Event.types['error_report_alert'],
509 Event.types['slow_report_alert'], ]
510 statuses = [Event.statuses['active']]
573 511 # get events older than 5 min
574 512 events = EventService.by_type_and_status(
575 event_types, statuses, older_than=(since_when - timedelta(minutes=5))
576 )
513 event_types,
514 statuses,
515 older_than=(since_when - timedelta(minutes=5)))
577 516 for event in events:
578 517 # see if we can close them
579 event.validate_or_close(since_when=(since_when - timedelta(minutes=1)))
518 event.validate_or_close(
519 since_when=(since_when - timedelta(minutes=1)))
580 520 except Exception as exc:
581 521 print_traceback(log)
582 522 raise
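To restate the two cutoffs used above: events older than five minutes are fetched as closing candidates, and the one-minute cutoff is handed to validate_or_close(), which decides whether each event can actually be closed:

    from datetime import datetime, timedelta

    since_when = datetime.utcnow()
    candidate_cutoff = since_when - timedelta(minutes=5)  # fetch events older than this
    close_cutoff = since_when - timedelta(minutes=1)      # passed to validate_or_close()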
583 523
584 524
585 525 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
586 526 def update_tag_counter(tag_name, tag_value, count):
587 527 try:
588 query = (
589 DBSession.query(Tag)
590 .filter(Tag.name == tag_name)
591 .filter(
592 sa.cast(Tag.value, sa.types.TEXT)
593 == sa.cast(json.dumps(tag_value), sa.types.TEXT)
594 )
595 )
596 query.update(
597 {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()},
598 synchronize_session=False,
599 )
528 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
529 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
530 sa.types.TEXT))
531 query.update({'times_seen': Tag.times_seen + count,
532 'last_timestamp': datetime.utcnow()},
533 synchronize_session=False)
600 534 session = DBSession()
601 535 mark_changed(session)
602 536 return True
603 537 except Exception as exc:
604 538 print_traceback(log)
605 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
606 raise
607 539 update_tag_counter.retry(exc=exc)
608 540
609 541
610 542 @celery.task(queue="default")
611 543 def update_tag_counters():
612 544 """
613 545 Schedules tasks that update the counters for application tags
614 546 """
615 tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1)
616 Datastores.redis.delete(REDIS_KEYS["seen_tag_list"])
547 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
548 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
617 549 c = collections.Counter(tags)
618 550 for t_json, count in c.items():
619 551 tag_info = json.loads(t_json)
620 552 update_tag_counter.delay(tag_info[0], tag_info[1], count)
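The drained seen_tag_list holds JSON-encoded (tag name, tag value) pairs, one entry per sighting, so the Counter collapses duplicates before any per-tag tasks are dispatched. Illustrative payloads:

    import collections
    import json

    tags = ['["browser", "firefox"]', '["browser", "firefox"]', '["os", "linux"]']
    for t_json, count in collections.Counter(tags).items():
        name, value = json.loads(t_json)
        print(name, value, count)  # real code: update_tag_counter.delay(name, value, count)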
621 553
622 554
623 555 @celery.task(queue="default")
624 556 def daily_digest():
625 557 """
626 558 Sends daily digest with top 50 error reports
627 559 """
628 560 request = get_current_request()
629 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
630 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
561 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
562 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
631 563 since_when = datetime.utcnow() - timedelta(hours=8)
632 log.warning("Generating daily digests")
564 log.warning('Generating daily digests')
633 565 for resource_id in apps:
634 resource_id = resource_id.decode("utf8")
566 resource_id = resource_id.decode('utf8')
635 567 end_date = datetime.utcnow().replace(microsecond=0, second=0)
636 filter_settings = {
637 "resource": [resource_id],
638 "tags": [{"name": "type", "value": ["error"], "op": None}],
639 "type": "error",
640 "start_date": since_when,
641 "end_date": end_date,
642 }
568 filter_settings = {'resource': [resource_id],
569 'tags': [{'name': 'type',
570 'value': ['error'], 'op': None}],
571 'type': 'error', 'start_date': since_when,
572 'end_date': end_date}
643 573
644 574 reports = ReportGroupService.get_trending(
645 request, filter_settings=filter_settings, limit=50
646 )
575 request, filter_settings=filter_settings, limit=50)
647 576
648 577 application = ApplicationService.by_id(resource_id)
649 578 if application:
650 users = set(
651 [p.user for p in ResourceService.users_for_perm(application, "view")]
652 )
579 users = set([p.user for p in application.users_for_perm('view')])
653 580 for user in users:
654 user.send_digest(
655 request, application, reports=reports, since_when=since_when
656 )
581 user.send_digest(request, application, reports=reports,
582 since_when=since_when)
657 583
658 584
659 585 @celery.task(queue="default")
660 586 def notifications_reports():
661 587 """
662 588 Loop that checks redis for info and then issues new celery tasks to
663 589 send notifications
664 590 """
665 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
666 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
591 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
592 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
667 593 for app in apps:
668 log.warning("Notify for app: %s" % app)
669 check_user_report_notifications.delay(app.decode("utf8"))
670
594 log.warning('Notify for app: %s' % app)
595 check_user_report_notifications.delay(app.decode('utf8'))
671 596
672 597 @celery.task(queue="default")
673 598 def alerting_reports():
674 599 """
675 600 Loop that checks redis for info and then issues new celery tasks to
676 601 determine:
677 602 - which applications should have new alerts opened
678 603 """
679 604
680 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"])
681 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"])
605 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
606 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
682 607 for app in apps:
683 log.warning("Notify for app: %s" % app)
684 check_alerts.delay(app.decode("utf8"))
608 log.warning('Notify for app: %s' % app)
609 check_alerts.delay(app.decode('utf8'))
685 610
686 611
687 @celery.task(
688 queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144
689 )
612 @celery.task(queue="default", soft_time_limit=3600 * 4,
613 hard_time_limit=3600 * 4, max_retries=144)
690 614 def logs_cleanup(resource_id, filter_settings):
691 615 request = get_current_request()
692 616 request.tm.begin()
693 es_query = {"query": {"bool": {"filter": [{"term": {"resource_id": resource_id}}]}}}
617 es_query = {
618 "_source": False,
619 "size": 5000,
620 "query": {
621 "filtered": {
622 "filter": {
623 "and": [{"term": {"resource_id": resource_id}}]
624 }
625 }
626 }
627 }
694 628
695 629 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
696 if filter_settings["namespace"]:
697 query = query.filter(Log.namespace == filter_settings["namespace"][0])
698 es_query["query"]["bool"]["filter"].append(
699 {"term": {"namespace": filter_settings["namespace"][0]}}
630 if filter_settings['namespace']:
631 query = query.filter(Log.namespace == filter_settings['namespace'][0])
632 es_query['query']['filtered']['filter']['and'].append(
633 {"term": {"namespace": filter_settings['namespace'][0]}}
700 634 )
701 635 query.delete(synchronize_session=False)
702 636 request.tm.commit()
703 Datastores.es.delete_by_query(
704 index="rcae_l_*", doc_type="log", body=es_query, conflicts="proceed"
705 )
637 result = request.es_conn.search(es_query, index='rcae_l_*',
638 doc_type='log', es_scroll='1m',
639 es_search_type='scan')
640 scroll_id = result['_scroll_id']
641 while True:
642 log.warning('log_cleanup, app:{} ns:{} batch'.format(
643 resource_id,
644 filter_settings['namespace']
645 ))
646 es_docs_to_delete = []
647 result = request.es_conn.send_request(
648 'POST', ['_search', 'scroll'],
649 body=scroll_id, query_params={"scroll": '1m'})
650 scroll_id = result['_scroll_id']
651 if not result['hits']['hits']:
652 break
653 for doc in result['hits']['hits']:
654 es_docs_to_delete.append({"id": doc['_id'],
655 "index": doc['_index']})
656
657 for batch in in_batches(es_docs_to_delete, 10):
658 Datastores.es.bulk([Datastores.es.delete_op(doc_type='log',
659 **to_del)
660 for to_del in batch])
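The two columns of this hunk do the same job with different clients: one scan/scrolls the matching ids and bulk-deletes them in batches, the other delegates the whole sweep to a single delete_by_query call. A reduced sketch of the delete_by_query form (resource id illustrative):

    from elasticsearch import Elasticsearch

    es = Elasticsearch()  # stand-in for Datastores.es
    es_query = {"query": {"bool": {"filter": [{"term": {"resource_id": 42}}]}}}
    es.delete_by_query(index="rcae_l_*", doc_type="log", body=es_query,
                       conflicts="proceed")  # skip version conflicts instead of aborting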
@@ -1,20 +1,19 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17
18 17 def filter_callable(structure, section=None):
19 structure["SOMEVAL"] = "***REMOVED***"
18 structure['SOMEVAL'] = '***REMOVED***'
20 19 return structure
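The filter replaces one key with a masked value before the structure travels further, presumably as a scrubbing hook (the section argument is unused here). For example:

    print(filter_callable({"SOMEVAL": "secret", "OTHER": 1}))
    # {'SOMEVAL': '***REMOVED***', 'OTHER': 1}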
This diff has been collapsed as it changes many lines (877 lines changed).
@@ -1,981 +1,896 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import wtforms
18 18 import formencode
19 19 import re
20 20 import pyramid.threadlocal
21 21 import datetime
22 22 import appenlight.lib.helpers as h
23 23
24 from ziggurat_foundations.models.services.user import UserService
25 from ziggurat_foundations.models.services.group import GroupService
24 from appenlight.models.user import User
25 from appenlight.models.group import Group
26 26 from appenlight.models import DBSession
27 27 from appenlight.models.alert_channel import AlertChannel
28 28 from appenlight.models.integrations import IntegrationException
29 29 from appenlight.models.integrations.campfire import CampfireIntegration
30 30 from appenlight.models.integrations.bitbucket import BitbucketIntegration
31 31 from appenlight.models.integrations.github import GithubIntegration
32 32 from appenlight.models.integrations.flowdock import FlowdockIntegration
33 33 from appenlight.models.integrations.hipchat import HipchatIntegration
34 34 from appenlight.models.integrations.jira import JiraClient
35 35 from appenlight.models.integrations.slack import SlackIntegration
36 36 from appenlight.lib.ext_json import json
37 37 from wtforms.ext.csrf.form import SecureForm
38 38 from wtforms.compat import iteritems
39 39 from collections import defaultdict
40 40
41 41 _ = str
42 42
43 43 strip_filter = lambda x: x.strip() if x else None
44 44 uppercase_filter = lambda x: x.upper() if x else None
45 45
46 FALSE_VALUES = ("false", "", False, None)
46 FALSE_VALUES = ('false', '', False, None)
47 47
48 48
49 49 class CSRFException(Exception):
50 50 pass
51 51
52 52
53 53 class ReactorForm(SecureForm):
54 def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
55 super(ReactorForm, self).__init__(
56 formdata=formdata,
57 obj=obj,
58 prefix=prefix,
59 csrf_context=csrf_context,
60 **kwargs
61 )
54 def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None,
55 **kwargs):
56 super(ReactorForm, self).__init__(formdata=formdata, obj=obj,
57 prefix=prefix,
58 csrf_context=csrf_context, **kwargs)
62 59 self._csrf_context = csrf_context
63 60
64 61 def generate_csrf_token(self, csrf_context):
65 62 return csrf_context.session.get_csrf_token()
66 63
67 64 def validate_csrf_token(self, field):
68 65 request = self._csrf_context or pyramid.threadlocal.get_current_request()
69 is_from_auth_token = "auth:auth_token" in request.effective_principals
66 is_from_auth_token = 'auth:auth_token' in request.effective_principals
70 67 if is_from_auth_token:
71 68 return True
72 69
73 70 if field.data != field.current_token:
74 71 # try to save the day by using token from angular
75 if request.headers.get("X-XSRF-TOKEN") != field.current_token:
76 raise CSRFException("Invalid CSRF token")
72 if request.headers.get('X-XSRF-TOKEN') != field.current_token:
73 raise CSRFException('Invalid CSRF token')
77 74
78 75 @property
79 76 def errors_dict(self):
80 77 r_dict = defaultdict(list)
81 78 for k, errors in self.errors.items():
82 79 r_dict[k].extend([str(e) for e in errors])
83 80 return r_dict
84 81
85 82 @property
86 83 def errors_json(self):
87 84 return json.dumps(self.errors_dict)
88 85
89 86 def populate_obj(self, obj, ignore_none=False):
90 87 """
91 88 Populates the attributes of the passed `obj` with data from the form's
92 89 fields.
93 90
94 91 :note: This is a destructive operation; Any attribute with the same name
95 92 as a field will be overridden. Use with caution.
96 93 """
97 94 if ignore_none:
98 95 for name, field in iteritems(self._fields):
99 96 if field.data is not None:
100 97 field.populate_obj(obj, name)
101 98 else:
102 99 for name, field in iteritems(self._fields):
103 100 field.populate_obj(obj, name)
104 101
105 102 css_classes = {}
106 103 ignore_labels = {}
107 104
108 105
109 106 class SignInForm(ReactorForm):
110 107 came_from = wtforms.HiddenField()
111 sign_in_user_name = wtforms.StringField(_("User Name"))
112 sign_in_user_password = wtforms.PasswordField(_("Password"))
108 sign_in_user_name = wtforms.StringField(_('User Name'))
109 sign_in_user_password = wtforms.PasswordField(_('Password'))
113 110
114 ignore_labels = ["submit"]
115 css_classes = {"submit": "btn btn-primary"}
111 ignore_labels = ['submit']
112 css_classes = {'submit': 'btn btn-primary'}
116 113
117 html_attrs = {
118 "sign_in_user_name": {"placeholder": "Your login"},
119 "sign_in_user_password": {"placeholder": "Your password"},
120 }
114 html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'},
115 'sign_in_user_password': {
116 'placeholder': 'Your password'}}
121 117
122 118
123 119 from wtforms.widgets import html_params, HTMLString
124 120
125 121
126 def select_multi_checkbox(field, ul_class="set", **kwargs):
122 def select_multi_checkbox(field, ul_class='set', **kwargs):
127 123 """Render a multi-checkbox widget"""
128 kwargs.setdefault("type", "checkbox")
129 field_id = kwargs.pop("id", field.id)
130 html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)]
124 kwargs.setdefault('type', 'checkbox')
125 field_id = kwargs.pop('id', field.id)
126 html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)]
131 127 for value, label, checked in field.iter_choices():
132 choice_id = "%s-%s" % (field_id, value)
128 choice_id = '%s-%s' % (field_id, value)
133 129 options = dict(kwargs, name=field.name, value=value, id=choice_id)
134 130 if checked:
135 options["checked"] = "checked"
136 html.append("<li><input %s /> " % html_params(**options))
131 options['checked'] = 'checked'
132 html.append('<li><input %s /> ' % html_params(**options))
137 133 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
138 html.append("</ul>")
139 return HTMLString("".join(html))
134 html.append('</ul>')
135 return HTMLString(''.join(html))
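select_multi_checkbox() plugs into wtforms as a field widget; ReportBrowserForm and LogBrowserForm further down use it exactly this way. A minimal illustrative form (not from the source):

    import wtforms

    class DemoFilterForm(wtforms.Form):  # illustrative
        applications = wtforms.SelectMultipleField(
            "Applications",
            choices=[("1", "app one"), ("2", "app two")],
            widget=select_multi_checkbox,
        )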
140 136
141 137
142 def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs):
138 def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs):
143 139 """Render a button widget"""
144 kwargs.setdefault("type", "button")
145 field_id = kwargs.pop("id", field.id)
146 kwargs.setdefault("value", field.label.text)
147 html = [
148 "<button %s>%s</button>"
149 % (html_params(id=field_id, class_=button_cls), kwargs["value"])
150 ]
151 return HTMLString("".join(html))
140 kwargs.setdefault('type', 'button')
141 field_id = kwargs.pop('id', field.id)
142 kwargs.setdefault('value', field.label.text)
143 html = ['<button %s>%s</button>' % (html_params(id=field_id,
144 class_=button_cls),
145 kwargs['value'],)]
146 return HTMLString(''.join(html))
152 147
153 148
154 149 def clean_whitespace(value):
155 150 if value:
156 151 return value.strip()
157 152 return value
158 153
159 154
160 155 def found_username_validator(form, field):
161 user = UserService.by_user_name(field.data)
156 user = User.by_user_name(field.data)
162 157 # sets user to recover in email validator
163 158 form.field_user = user
164 159 if not user:
165 raise wtforms.ValidationError("This username does not exist")
160 raise wtforms.ValidationError('This username does not exist')
166 161
167 162
168 163 def found_username_email_validator(form, field):
169 user = UserService.by_email(field.data)
164 user = User.by_email(field.data)
170 165 if not user:
171 raise wtforms.ValidationError("Email is incorrect")
166 raise wtforms.ValidationError('Email is incorrect')
172 167
173 168
174 169 def unique_username_validator(form, field):
175 user = UserService.by_user_name(field.data)
170 user = User.by_user_name(field.data)
176 171 if user:
177 raise wtforms.ValidationError("This username already exists in the system")
172 raise wtforms.ValidationError('This username already exists in the system')
178 173
179 174
180 175 def unique_groupname_validator(form, field):
181 group = GroupService.by_group_name(field.data)
182 mod_group = getattr(form, "_modified_group", None)
176 group = Group.by_group_name(field.data)
177 mod_group = getattr(form, '_modified_group', None)
183 178 if group and (not mod_group or mod_group.id != group.id):
184 raise wtforms.ValidationError("This group name already exists in the system")
179 raise wtforms.ValidationError(
180 'This group name already exists in the system')
185 181
186 182
187 183 def unique_email_validator(form, field):
188 user = UserService.by_email(field.data)
184 user = User.by_email(field.data)
189 185 if user:
190 raise wtforms.ValidationError("This email already exists in the system")
186 raise wtforms.ValidationError('This email already exists in the system')
191 187
192 188
193 189 def email_validator(form, field):
194 190 validator = formencode.validators.Email()
195 191 try:
196 192 validator.to_python(field.data)
197 193 except formencode.Invalid as e:
198 194 raise wtforms.ValidationError(e)
199 195
200 196
201 197 def unique_alert_email_validator(form, field):
202 198 q = DBSession.query(AlertChannel)
203 q = q.filter(AlertChannel.channel_name == "email")
199 q = q.filter(AlertChannel.channel_name == 'email')
204 200 q = q.filter(AlertChannel.channel_value == field.data)
205 201 email = q.first()
206 202 if email:
207 raise wtforms.ValidationError("This email already exists in the alert system")
203 raise wtforms.ValidationError(
204 'This email already exists in the alert system')
208 205
209 206
210 207 def blocked_email_validator(form, field):
211 208 blocked_emails = [
212 "goood-mail.org",
213 "shoeonlineblog.com",
214 "louboutinemart.com",
215 "guccibagshere.com",
216 "nikeshoesoutletforsale.com",
209 'goood-mail.org',
210 'shoeonlineblog.com',
211 'louboutinemart.com',
212 'guccibagshere.com',
213 'nikeshoesoutletforsale.com'
217 214 ]
218 data = field.data or ""
219 domain = data.split("@")[-1]
215 data = field.data or ''
216 domain = data.split('@')[-1]
220 217 if domain in blocked_emails:
221 raise wtforms.ValidationError("Don't spam")
218 raise wtforms.ValidationError('Don\'t spam')
222 219
223 220
224 221 def old_password_validator(form, field):
225 if not UserService.check_password(field.user, field.data or ""):
226 raise wtforms.ValidationError("You need to enter the correct password")
222 if not field.user.check_password(field.data or ''):
223 raise wtforms.ValidationError('You need to enter the correct password')
227 224
228 225
229 226 class UserRegisterForm(ReactorForm):
230 227 user_name = wtforms.StringField(
231 _("User Name"),
228 _('User Name'),
232 229 filters=[strip_filter],
233 230 validators=[
234 231 wtforms.validators.Length(min=2, max=30),
235 232 wtforms.validators.Regexp(
236 re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used"
237 ),
233 re.compile(r'^[\.\w-]+$', re.UNICODE),
234 message="Invalid characters used"),
238 235 unique_username_validator,
239 wtforms.validators.DataRequired(),
240 ],
241 )
236 wtforms.validators.DataRequired()
237 ])
242 238
243 user_password = wtforms.PasswordField(
244 _("User Password"),
245 filters=[strip_filter],
246 validators=[
247 wtforms.validators.Length(min=4),
248 wtforms.validators.DataRequired(),
249 ],
250 )
239 user_password = wtforms.PasswordField(_('User Password'),
240 filters=[strip_filter],
241 validators=[
242 wtforms.validators.Length(min=4),
243 wtforms.validators.DataRequired()
244 ])
251 245
252 email = wtforms.StringField(
253 _("Email Address"),
254 filters=[strip_filter],
255 validators=[
256 email_validator,
257 unique_email_validator,
258 blocked_email_validator,
259 wtforms.validators.DataRequired(),
260 ],
261 )
262 first_name = wtforms.HiddenField(_("First Name"))
263 last_name = wtforms.HiddenField(_("Last Name"))
246 email = wtforms.StringField(_('Email Address'),
247 filters=[strip_filter],
248 validators=[email_validator,
249 unique_email_validator,
250 blocked_email_validator,
251 wtforms.validators.DataRequired()])
252 first_name = wtforms.HiddenField(_('First Name'))
253 last_name = wtforms.HiddenField(_('Last Name'))
264 254
265 ignore_labels = ["submit"]
266 css_classes = {"submit": "btn btn-primary"}
255 ignore_labels = ['submit']
256 css_classes = {'submit': 'btn btn-primary'}
267 257
268 html_attrs = {
269 "user_name": {"placeholder": "Your login"},
270 "user_password": {"placeholder": "Your password"},
271 "email": {"placeholder": "Your email"},
272 }
258 html_attrs = {'user_name': {'placeholder': 'Your login'},
259 'user_password': {'placeholder': 'Your password'},
260 'email': {'placeholder': 'Your email'}}
273 261
274 262
275 263 class UserCreateForm(UserRegisterForm):
276 status = wtforms.BooleanField("User status", false_values=FALSE_VALUES)
264 status = wtforms.BooleanField('User status',
265 false_values=FALSE_VALUES)
277 266
278 267
279 268 class UserUpdateForm(UserCreateForm):
280 269 user_name = None
281 user_password = wtforms.PasswordField(
282 _("User Password"),
283 filters=[strip_filter],
284 validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()],
285 )
286 email = wtforms.StringField(
287 _("Email Address"),
288 filters=[strip_filter],
289 validators=[email_validator, wtforms.validators.DataRequired()],
290 )
270 user_password = wtforms.PasswordField(_('User Password'),
271 filters=[strip_filter],
272 validators=[
273 wtforms.validators.Length(min=4),
274 wtforms.validators.Optional()
275 ])
276 email = wtforms.StringField(_('Email Address'),
277 filters=[strip_filter],
278 validators=[email_validator,
279 wtforms.validators.DataRequired()])
291 280
292 281
293 282 class LostPasswordForm(ReactorForm):
294 email = wtforms.StringField(
295 _("Email Address"),
296 filters=[strip_filter],
297 validators=[
298 email_validator,
299 found_username_email_validator,
300 wtforms.validators.DataRequired(),
301 ],
302 )
283 email = wtforms.StringField(_('Email Address'),
284 filters=[strip_filter],
285 validators=[email_validator,
286 found_username_email_validator,
287 wtforms.validators.DataRequired()])
303 288
304 submit = wtforms.SubmitField(_("Reset password"))
305 ignore_labels = ["submit"]
306 css_classes = {"submit": "btn btn-primary"}
289 submit = wtforms.SubmitField(_('Reset password'))
290 ignore_labels = ['submit']
291 css_classes = {'submit': 'btn btn-primary'}
307 292
308 293
309 294 class ChangePasswordForm(ReactorForm):
310 295 old_password = wtforms.PasswordField(
311 "Old Password",
296 'Old Password',
312 297 filters=[strip_filter],
313 validators=[old_password_validator, wtforms.validators.DataRequired()],
314 )
298 validators=[old_password_validator,
299 wtforms.validators.DataRequired()])
315 300
316 301 new_password = wtforms.PasswordField(
317 "New Password",
302 'New Password',
318 303 filters=[strip_filter],
319 validators=[
320 wtforms.validators.Length(min=4),
321 wtforms.validators.DataRequired(),
322 ],
323 )
304 validators=[wtforms.validators.Length(min=4),
305 wtforms.validators.DataRequired()])
324 306 new_password_confirm = wtforms.PasswordField(
325 "Confirm Password",
307 'Confirm Password',
326 308 filters=[strip_filter],
327 validators=[
328 wtforms.validators.EqualTo("new_password"),
329 wtforms.validators.DataRequired(),
330 ],
331 )
332 submit = wtforms.SubmitField("Change Password")
333 ignore_labels = ["submit"]
334 css_classes = {"submit": "btn btn-primary"}
309 validators=[wtforms.validators.EqualTo('new_password'),
310 wtforms.validators.DataRequired()])
311 submit = wtforms.SubmitField('Change Password')
312 ignore_labels = ['submit']
313 css_classes = {'submit': 'btn btn-primary'}
335 314
336 315
337 316 class CheckPasswordForm(ReactorForm):
338 317 password = wtforms.PasswordField(
339 "Password",
318 'Password',
340 319 filters=[strip_filter],
341 validators=[old_password_validator, wtforms.validators.DataRequired()],
342 )
320 validators=[old_password_validator,
321 wtforms.validators.DataRequired()])
343 322
344 323
345 324 class NewPasswordForm(ReactorForm):
346 325 new_password = wtforms.PasswordField(
347 "New Password",
326 'New Password',
348 327 filters=[strip_filter],
349 validators=[
350 wtforms.validators.Length(min=4),
351 wtforms.validators.DataRequired(),
352 ],
353 )
328 validators=[wtforms.validators.Length(min=4),
329 wtforms.validators.DataRequired()])
354 330 new_password_confirm = wtforms.PasswordField(
355 "Confirm Password",
331 'Confirm Password',
356 332 filters=[strip_filter],
357 validators=[
358 wtforms.validators.EqualTo("new_password"),
359 wtforms.validators.DataRequired(),
360 ],
361 )
362 submit = wtforms.SubmitField("Set Password")
363 ignore_labels = ["submit"]
364 css_classes = {"submit": "btn btn-primary"}
333 validators=[wtforms.validators.EqualTo('new_password'),
334 wtforms.validators.DataRequired()])
335 submit = wtforms.SubmitField('Set Password')
336 ignore_labels = ['submit']
337 css_classes = {'submit': 'btn btn-primary'}
365 338
366 339
367 340 class CORSTextAreaField(wtforms.StringField):
368 341 """
369 342 This field represents an HTML ``<textarea>`` and can be used to take
370 343 multi-line input.
371 344 """
372
373 345 widget = wtforms.widgets.TextArea()
374 346
375 347 def process_formdata(self, valuelist):
376 348 self.data = []
377 349 if valuelist:
378 data = [x.strip() for x in valuelist[0].split("\n")]
350 data = [x.strip() for x in valuelist[0].split('\n')]
379 351 for d in data:
380 352 if not d:
381 353 continue
382 if d.startswith("www."):
354 if d.startswith('www.'):
383 355 d = d[4:]
384 356 if data:
385 357 self.data.append(d)
386 358 else:
387 359 self.data = []
388 self.data = "\n".join(self.data)
360 self.data = '\n'.join(self.data)
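The net effect of process_formdata() is a newline-joined list of bare domains: blank lines are dropped and a leading "www." is stripped. The same rules, applied standalone:

    raw = "www.example.com\n\nfoo.org \n"
    cleaned = []
    for d in (line.strip() for line in raw.split("\n")):
        if not d:
            continue
        if d.startswith("www."):
            d = d[4:]
        cleaned.append(d)
    print("\n".join(cleaned))  # example.com, then foo.org, one per line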
389 361
390 362
391 363 class ApplicationCreateForm(ReactorForm):
392 364 resource_name = wtforms.StringField(
393 _("Application name"),
365 _('Application name'),
394 366 filters=[strip_filter],
395 validators=[
396 wtforms.validators.Length(min=1),
397 wtforms.validators.DataRequired(),
398 ],
399 )
367 validators=[wtforms.validators.Length(min=1),
368 wtforms.validators.DataRequired()])
400 369
401 370 domains = CORSTextAreaField(
402 _("Domain names for CORS headers "),
403 validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()],
404 description="Required for Javascript error "
405 "tracking (one domain per line, skip the http:// part)",
406 )
371 _('Domain names for CORS headers '),
372 validators=[wtforms.validators.Length(min=1),
373 wtforms.validators.Optional()],
374 description='Required for Javascript error '
375 'tracking (one domain per line, skip the http:// part)')
407 376
408 submit = wtforms.SubmitField(_("Create Application"))
377 submit = wtforms.SubmitField(_('Create Application'))
409 378
410 ignore_labels = ["submit"]
411 css_classes = {"submit": "btn btn-primary"}
412 html_attrs = {
413 "resource_name": {"placeholder": "Application Name"},
414 "uptime_url": {"placeholder": "http://somedomain.com"},
415 }
379 ignore_labels = ['submit']
380 css_classes = {'submit': 'btn btn-primary'}
381 html_attrs = {'resource_name': {'placeholder': 'Application Name'},
382 'uptime_url': {'placeholder': 'http://somedomain.com'}}
416 383
417 384
418 385 class ApplicationUpdateForm(ApplicationCreateForm):
419 386 default_grouping = wtforms.SelectField(
420 _("Default grouping for errors"),
421 choices=[
422 ("url_type", "Error Type + location"),
423 ("url_traceback", "Traceback + location"),
424 ("traceback_server", "Traceback + Server"),
425 ],
426 default="url_traceback",
427 )
387 _('Default grouping for errors'),
388 choices=[('url_type', 'Error Type + location',),
389 ('url_traceback', 'Traceback + location',),
390 ('traceback_server', 'Traceback + Server',)],
391 default='url_traceback')
428 392
429 393 error_report_threshold = wtforms.IntegerField(
430 _("Alert on error reports"),
394 _('Alert on error reports'),
431 395 validators=[
432 396 wtforms.validators.NumberRange(min=1),
433 wtforms.validators.DataRequired(),
397 wtforms.validators.DataRequired()
434 398 ],
435 description="Application must send at least this many "
436 "error reports per minute to open an alert",
399 description='Application must send at least this many '
400 'error reports per minute to open an alert'
437 401 )
438 402
439 403 slow_report_threshold = wtforms.IntegerField(
440 _("Alert on slow reports"),
441 validators=[
442 wtforms.validators.NumberRange(min=1),
443 wtforms.validators.DataRequired(),
444 ],
445 description="Application must send at least this many "
446 "slow reports per minute to open an alert",
447 )
404 _('Alert on slow reports'),
405 validators=[wtforms.validators.NumberRange(min=1),
406 wtforms.validators.DataRequired()],
407 description='Application must send at least this many '
408 'slow reports per minute to open an alert')
448 409
449 410 allow_permanent_storage = wtforms.BooleanField(
450 _("Permanent logs"),
411 _('Permanent logs'),
451 412 false_values=FALSE_VALUES,
452 description=_("Allow permanent storage of logs in separate DB partitions"),
453 )
413 description=_(
414 'Allow permanent storage of logs in separate DB partitions'))
454 415
455 submit = wtforms.SubmitField(_("Create Application"))
416 submit = wtforms.SubmitField(_('Create Application'))
456 417
457 418
458 419 class UserSearchSchemaForm(ReactorForm):
459 user_name = wtforms.StringField("User Name", filters=[strip_filter])
420 user_name = wtforms.StringField('User Name',
421 filters=[strip_filter], )
460 422
461 submit = wtforms.SubmitField(_("Search User"))
462 ignore_labels = ["submit"]
463 css_classes = {"submit": "btn btn-primary"}
423 submit = wtforms.SubmitField(_('Search User'))
424 ignore_labels = ['submit']
425 css_classes = {'submit': 'btn btn-primary'}
464 426
465 427 '<li class="user_exists"><span></span></li>'
466 428
467 429
468 430 class YesNoForm(ReactorForm):
469 no = wtforms.SubmitField("No", default="")
470 yes = wtforms.SubmitField("Yes", default="")
471 ignore_labels = ["submit"]
472 css_classes = {"submit": "btn btn-primary"}
431 no = wtforms.SubmitField('No', default='')
432 yes = wtforms.SubmitField('Yes', default='')
433 ignore_labels = ['submit']
434 css_classes = {'submit': 'btn btn-primary'}
473 435
474 436
475 status_codes = [("", "All"), ("500", "500"), ("404", "404")]
437 status_codes = [('', 'All',), ('500', '500',), ('404', '404',)]
476 438
477 priorities = [("", "All")]
439 priorities = [('', 'All',)]
478 440 for i in range(1, 11):
479 priorities.append((str(i), str(i)))
441 priorities.append((str(i), str(i),))
480 442
481 report_status_choices = [
482 ("", "All"),
483 ("never_reviewed", "Never revieved"),
484 ("reviewed", "Revieved"),
485 ("public", "Public"),
486 ("fixed", "Fixed"),
487 ]
443 report_status_choices = [('', 'All',),
444 ('never_reviewed', 'Never reviewed',),
445 ('reviewed', 'Reviewed',),
446 ('public', 'Public',),
447 ('fixed', 'Fixed',), ]
488 448
489 449
490 450 class ReportBrowserForm(ReactorForm):
491 applications = wtforms.SelectMultipleField(
492 "Applications", widget=select_multi_checkbox
493 )
494 http_status = wtforms.SelectField("HTTP Status", choices=status_codes)
495 priority = wtforms.SelectField("Priority", choices=priorities, default="")
496 start_date = wtforms.DateField("Start Date")
497 end_date = wtforms.DateField("End Date")
498 error = wtforms.StringField("Error")
499 url_path = wtforms.StringField("URL Path")
500 url_domain = wtforms.StringField("URL Domain")
501 report_status = wtforms.SelectField(
502 "Report status", choices=report_status_choices, default=""
503 )
504 submit = wtforms.SubmitField(
505 '<span class="glyphicon glyphicon-search">' "</span> Filter results",
506 widget=button_widget,
507 )
508
509 ignore_labels = ["submit"]
510 css_classes = {"submit": "btn btn-primary"}
511
512
513 slow_report_status_choices = [
514 ("", "All"),
515 ("never_reviewed", "Never revieved"),
516 ("reviewed", "Revieved"),
517 ("public", "Public"),
518 ]
451 applications = wtforms.SelectMultipleField('Applications',
452 widget=select_multi_checkbox)
453 http_status = wtforms.SelectField('HTTP Status', choices=status_codes)
454 priority = wtforms.SelectField('Priority', choices=priorities, default='')
455 start_date = wtforms.DateField('Start Date')
456 end_date = wtforms.DateField('End Date')
457 error = wtforms.StringField('Error')
458 url_path = wtforms.StringField('URL Path')
459 url_domain = wtforms.StringField('URL Domain')
460 report_status = wtforms.SelectField('Report status',
461 choices=report_status_choices,
462 default='')
463 submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">'
464 '</span> Filter results',
465 widget=button_widget)
466
467 ignore_labels = ['submit']
468 css_classes = {'submit': 'btn btn-primary'}
469
470
471 slow_report_status_choices = [('', 'All',),
472 ('never_reviewed', 'Never reviewed',),
473 ('reviewed', 'Reviewed',),
474 ('public', 'Public',), ]
519 475
520 476
521 477 class BulkOperationForm(ReactorForm):
522 applications = wtforms.SelectField("Applications")
478 applications = wtforms.SelectField('Applications')
523 479 start_date = wtforms.DateField(
524 "Start Date",
525 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90),
526 )
527 end_date = wtforms.DateField("End Date")
480 'Start Date',
481 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(
482 days=90))
483 end_date = wtforms.DateField('End Date')
528 484 confirm = wtforms.BooleanField(
529 "Confirm operation", validators=[wtforms.validators.DataRequired()]
530 )
485 'Confirm operation',
486 validators=[wtforms.validators.DataRequired()])
531 487
532 488
533 489 class LogBrowserForm(ReactorForm):
534 applications = wtforms.SelectMultipleField(
535 "Applications", widget=select_multi_checkbox
536 )
537 start_date = wtforms.DateField("Start Date")
538 log_level = wtforms.StringField("Log level")
539 message = wtforms.StringField("Message")
540 namespace = wtforms.StringField("Namespace")
490 applications = wtforms.SelectMultipleField('Applications',
491 widget=select_multi_checkbox)
492 start_date = wtforms.DateField('Start Date')
493 log_level = wtforms.StringField('Log level')
494 message = wtforms.StringField('Message')
495 namespace = wtforms.StringField('Namespace')
541 496 submit = wtforms.SubmitField(
542 497 '<span class="glyphicon glyphicon-search"></span> Filter results',
543 widget=button_widget,
544 )
545 ignore_labels = ["submit"]
546 css_classes = {"submit": "btn btn-primary"}
498 widget=button_widget)
499 ignore_labels = ['submit']
500 css_classes = {'submit': 'btn btn-primary'}
547 501
548 502
549 503 class CommentForm(ReactorForm):
550 body = wtforms.TextAreaField(
551 "Comment",
552 validators=[
553 wtforms.validators.Length(min=1),
554 wtforms.validators.DataRequired(),
555 ],
556 )
557 submit = wtforms.SubmitField("Comment")
558 ignore_labels = ["submit"]
559 css_classes = {"submit": "btn btn-primary"}
504 body = wtforms.TextAreaField('Comment', validators=[
505 wtforms.validators.Length(min=1),
506 wtforms.validators.DataRequired()
507 ])
508 submit = wtforms.SubmitField('Comment', )
509 ignore_labels = ['submit']
510 css_classes = {'submit': 'btn btn-primary'}
560 511
561 512
562 513 class EmailChannelCreateForm(ReactorForm):
563 email = wtforms.StringField(
564 _("Email Address"),
565 filters=[strip_filter],
566 validators=[
567 email_validator,
568 unique_alert_email_validator,
569 wtforms.validators.DataRequired(),
570 ],
571 )
572 submit = wtforms.SubmitField("Add email channel")
573 ignore_labels = ["submit"]
574 css_classes = {"submit": "btn btn-primary"}
514 email = wtforms.StringField(_('Email Address'),
515 filters=[strip_filter],
516 validators=[email_validator,
517 unique_alert_email_validator,
518 wtforms.validators.DataRequired()])
519 submit = wtforms.SubmitField('Add email channel', )
520 ignore_labels = ['submit']
521 css_classes = {'submit': 'btn btn-primary'}
575 522
576 523
577 524 def gen_user_profile_form():
578 525 class UserProfileForm(ReactorForm):
579 526 email = wtforms.StringField(
580 _("Email Address"),
581 validators=[email_validator, wtforms.validators.DataRequired()],
582 )
583 first_name = wtforms.StringField(_("First Name"))
584 last_name = wtforms.StringField(_("Last Name"))
585 company_name = wtforms.StringField(_("Company Name"))
586 company_address = wtforms.TextAreaField(_("Company Address"))
587 zip_code = wtforms.StringField(_("ZIP code"))
588 city = wtforms.StringField(_("City"))
589 notifications = wtforms.BooleanField(
590 "Account notifications", false_values=FALSE_VALUES
591 )
592 submit = wtforms.SubmitField(_("Update Account"))
593 ignore_labels = ["submit"]
594 css_classes = {"submit": "btn btn-primary"}
527 _('Email Address'),
528 validators=[email_validator, wtforms.validators.DataRequired()])
529 first_name = wtforms.StringField(_('First Name'))
530 last_name = wtforms.StringField(_('Last Name'))
531 company_name = wtforms.StringField(_('Company Name'))
532 company_address = wtforms.TextAreaField(_('Company Address'))
533 zip_code = wtforms.StringField(_('ZIP code'))
534 city = wtforms.StringField(_('City'))
535 notifications = wtforms.BooleanField('Account notifications',
536 false_values=FALSE_VALUES)
537 submit = wtforms.SubmitField(_('Update Account'))
538 ignore_labels = ['submit']
539 css_classes = {'submit': 'btn btn-primary'}
595 540
596 541 return UserProfileForm
597 542
598 543
599 544 class PurgeAppForm(ReactorForm):
600 545 resource_id = wtforms.HiddenField(
601 "App Id", validators=[wtforms.validators.DataRequired()]
602 )
603 days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()])
546 'App Id',
547 validators=[wtforms.validators.DataRequired()])
548 days = wtforms.IntegerField(
549 'Days',
550 validators=[wtforms.validators.DataRequired()])
604 551 password = wtforms.PasswordField(
605 "Admin Password",
606 validators=[old_password_validator, wtforms.validators.DataRequired()],
607 )
608 submit = wtforms.SubmitField(_("Purge Data"))
609 ignore_labels = ["submit"]
610 css_classes = {"submit": "btn btn-primary"}
552 'Admin Password',
553 validators=[old_password_validator, wtforms.validators.DataRequired()])
554 submit = wtforms.SubmitField(_('Purge Data'))
555 ignore_labels = ['submit']
556 css_classes = {'submit': 'btn btn-primary'}
611 557
612 558
613 559 class IntegrationRepoForm(ReactorForm):
614 host_name = wtforms.StringField("Service Host", default="")
560 host_name = wtforms.StringField("Service Host", default='')
615 561 user_name = wtforms.StringField(
616 562 "User Name",
617 563 filters=[strip_filter],
618 validators=[
619 wtforms.validators.DataRequired(),
620 wtforms.validators.Length(min=1),
621 ],
622 )
564 validators=[wtforms.validators.DataRequired(),
565 wtforms.validators.Length(min=1)])
623 566 repo_name = wtforms.StringField(
624 567 "Repo Name",
625 568 filters=[strip_filter],
626 validators=[
627 wtforms.validators.DataRequired(),
628 wtforms.validators.Length(min=1),
629 ],
630 )
569 validators=[wtforms.validators.DataRequired(),
570 wtforms.validators.Length(min=1)])
631 571
632 572
633 573 class IntegrationBitbucketForm(IntegrationRepoForm):
634 host_name = wtforms.StringField("Service Host", default="https://bitbucket.org")
574 host_name = wtforms.StringField("Service Host",
575 default='https://bitbucket.org')
635 576
636 577 def validate_user_name(self, field):
637 578 try:
638 579 request = pyramid.threadlocal.get_current_request()
639 580 client = BitbucketIntegration.create_client(
640 request, self.user_name.data, self.repo_name.data
641 )
581 request,
582 self.user_name.data,
583 self.repo_name.data)
642 584 client.get_assignees()
643 585 except IntegrationException as e:
644 586 raise wtforms.validators.ValidationError(str(e))
645 587
646 588
647 589 class IntegrationGithubForm(IntegrationRepoForm):
648 host_name = wtforms.StringField("Service Host", default="https://github.com")
590 host_name = wtforms.StringField("Service Host",
591 default='https://github.com')
649 592
650 593 def validate_user_name(self, field):
651 594 try:
652 595 request = pyramid.threadlocal.get_current_request()
653 596 client = GithubIntegration.create_client(
654 request, self.user_name.data, self.repo_name.data
655 )
597 request,
598 self.user_name.data,
599 self.repo_name.data)
656 600 client.get_assignees()
657 601 except IntegrationException as e:
658 602 raise wtforms.validators.ValidationError(str(e))
660 604
661 605
662 606 def filter_rooms(data):
663 607 if data is not None:
664 rooms = data.split(",")
665 return ",".join([r.strip() for r in rooms])
608 rooms = data.split(',')
609 return ','.join([r.strip() for r in rooms])
666 610
667 611
668 612 class IntegrationCampfireForm(ReactorForm):
669 613 account = wtforms.StringField(
670 "Account",
614 'Account',
671 615 filters=[strip_filter],
672 validators=[wtforms.validators.DataRequired()],
673 )
616 validators=[wtforms.validators.DataRequired()])
674 617 api_token = wtforms.StringField(
675 "Api Token",
618 'Api Token',
676 619 filters=[strip_filter],
677 validators=[wtforms.validators.DataRequired()],
678 )
679 rooms = wtforms.StringField("Room ID list", filters=[filter_rooms])
620 validators=[wtforms.validators.DataRequired()])
621 rooms = wtforms.StringField('Room ID list', filters=[filter_rooms])
680 622
681 623 def validate_api_token(self, field):
682 624 try:
683 client = CampfireIntegration.create_client(
684 self.api_token.data, self.account.data
685 )
625 client = CampfireIntegration.create_client(self.api_token.data,
626 self.account.data)
686 627 client.get_account()
687 628 except IntegrationException as e:
688 629 raise wtforms.validators.ValidationError(str(e))
689 630
690 631 def validate_rooms(self, field):
691 632 if not field.data:
692 633 return
693 client = CampfireIntegration.create_client(
694 self.api_token.data, self.account.data
695 )
634 client = CampfireIntegration.create_client(self.api_token.data,
635 self.account.data)
696 636
697 637 try:
698 room_list = [r["id"] for r in client.get_rooms()]
638 room_list = [r['id'] for r in client.get_rooms()]
699 639 except IntegrationException as e:
700 640 raise wtforms.validators.ValidationError(str(e))
701 641
702 rooms = field.data.split(",")
642 rooms = field.data.split(',')
703 643 if len(rooms) > 3:
704 msg = "You can use up to 3 room ids"
644 msg = 'You can use up to 3 room ids'
705 645 raise wtforms.validators.ValidationError(msg)
706 646 if rooms:
707 647 for room_id in rooms:
711 651 if not room_id.strip().isdigit():
712 msg = "You must use only integers for room ids"
652 msg = 'You must use only integers for room ids'
713 653 raise wtforms.validators.ValidationError(msg)
708 648 if int(room_id) not in room_list:
709 649 msg = "Room %s doesn't exist"
710 650 raise wtforms.validators.ValidationError(msg % room_id)
714 654
715 submit = wtforms.SubmitField(_("Connect to Campfire"))
716 ignore_labels = ["submit"]
717 css_classes = {"submit": "btn btn-primary"}
655 submit = wtforms.SubmitField(_('Connect to Campfire'))
656 ignore_labels = ['submit']
657 css_classes = {'submit': 'btn btn-primary'}
718 658
719 659
720 660 def filter_rooms(data):
721 661 if data is not None:
722 rooms = data.split(",")
723 return ",".join([r.strip() for r in rooms])
662 rooms = data.split(',')
663 return ','.join([r.strip() for r in rooms])
724 664
725 665
726 666 class IntegrationHipchatForm(ReactorForm):
727 667 api_token = wtforms.StringField(
728 "Api Token",
668 'Api Token',
729 669 filters=[strip_filter],
730 validators=[wtforms.validators.DataRequired()],
731 )
670 validators=[wtforms.validators.DataRequired()])
732 671 rooms = wtforms.StringField(
733 "Room ID list",
672 'Room ID list',
734 673 filters=[filter_rooms],
735 validators=[wtforms.validators.DataRequired()],
736 )
674 validators=[wtforms.validators.DataRequired()])
737 675
738 676 def validate_rooms(self, field):
739 677 if not field.data:
740 678 return
741 679 client = HipchatIntegration.create_client(self.api_token.data)
742 rooms = field.data.split(",")
680 rooms = field.data.split(',')
743 681 if len(rooms) > 3:
744 msg = "You can use up to 3 room ids"
682 msg = 'You can use up to 3 room ids'
745 683 raise wtforms.validators.ValidationError(msg)
746 684 if rooms:
747 685 for room_id in rooms:
748 686 if not room_id.strip().isdigit():
749 msg = "You must use only integers for room ids"
687 msg = 'You must use only integers for room ids'
750 688 raise wtforms.validators.ValidationError(msg)
751 689 try:
752 client.send(
753 {
754 "message_format": "text",
755 "message": "testing for room existence",
756 "from": "AppEnlight",
757 "room_id": room_id,
758 "color": "green",
759 }
760 )
690 client.send({
691 "message_format": 'text',
692 "message": "testing for room existence",
693 "from": "AppEnlight",
694 "room_id": room_id,
695 "color": "green"
696 })
761 697 except IntegrationException as exc:
762 msg = "Room id: %s exception: %s"
763 raise wtforms.validators.ValidationError(msg % (room_id, exc))
698 msg = 'Room id: %s exception: %s'
699 raise wtforms.validators.ValidationError(msg % (room_id,
700 exc))
764 701
765 702
766 703 class IntegrationFlowdockForm(ReactorForm):
767 api_token = wtforms.StringField(
768 "API Token",
769 filters=[strip_filter],
770 validators=[wtforms.validators.DataRequired()],
771 )
704 api_token = wtforms.StringField('API Token',
705 filters=[strip_filter],
706 validators=[
707 wtforms.validators.DataRequired()
708 ], )
772 709
773 710 def validate_api_token(self, field):
774 711 try:
775 712 client = FlowdockIntegration.create_client(self.api_token.data)
776 713 registry = pyramid.threadlocal.get_current_registry()
777 714 payload = {
778 "source": registry.settings["mailing.from_name"],
779 "from_address": registry.settings["mailing.from_email"],
715 "source": registry.settings['mailing.from_name'],
716 "from_address": registry.settings['mailing.from_email'],
780 717 "subject": "Integration test",
781 718 "content": "If you can see this it was successful",
782 719 "tags": ["appenlight"],
783 "link": registry.settings["mailing.app_url"],
720 "link": registry.settings['mailing.app_url']
784 721 }
785 722 client.send_to_inbox(payload)
786 723 except IntegrationException as e:
787 724 raise wtforms.validators.ValidationError(str(e))
788 725
789 726
790 727 class IntegrationSlackForm(ReactorForm):
791 728 webhook_url = wtforms.StringField(
792 "Reports webhook",
729 'Reports webhook',
793 730 filters=[strip_filter],
794 validators=[wtforms.validators.DataRequired()],
795 )
731 validators=[wtforms.validators.DataRequired()])
796 732
797 733 def validate_webhook_url(self, field):
798 734 registry = pyramid.threadlocal.get_current_registry()
799 735 client = SlackIntegration.create_client(field.data)
800 link = "<%s|%s>" % (
801 registry.settings["mailing.app_url"],
802 registry.settings["mailing.from_name"],
803 )
736 link = "<%s|%s>" % (registry.settings['mailing.app_url'],
737 registry.settings['mailing.from_name'])
804 738 test_data = {
805 739 "username": "AppEnlight",
806 740 "icon_emoji": ":fire:",
807 741 "attachments": [
808 {
809 "fallback": "Testing integration channel: %s" % link,
810 "pretext": "Testing integration channel: %s" % link,
811 "color": "good",
812 "fields": [
813 {
814 "title": "Status",
815 "value": "Integration is working fine",
816 "short": False,
817 }
818 ],
819 }
820 ],
742 {"fallback": "Testing integration channel: %s" % link,
743 "pretext": "Testing integration channel: %s" % link,
744 "color": "good",
745 "fields": [
746 {
747 "title": "Status",
748 "value": "Integration is working fine",
749 "short": False
750 }
751 ]}
752 ]
821 753 }
822 754 try:
823 755 client.make_request(data=test_data)
824 756 except IntegrationException as exc:
825 757 raise wtforms.validators.ValidationError(str(exc))
826 758
827 759
828 760 class IntegrationWebhooksForm(ReactorForm):
829 761 reports_webhook = wtforms.StringField(
830 "Reports webhook",
762 'Reports webhook',
831 763 filters=[strip_filter],
832 validators=[wtforms.validators.DataRequired()],
833 )
764 validators=[wtforms.validators.DataRequired()])
834 765 alerts_webhook = wtforms.StringField(
835 "Alerts webhook",
766 'Alerts webhook',
836 767 filters=[strip_filter],
837 validators=[wtforms.validators.DataRequired()],
838 )
839 submit = wtforms.SubmitField(_("Setup webhooks"))
840 ignore_labels = ["submit"]
841 css_classes = {"submit": "btn btn-primary"}
768 validators=[wtforms.validators.DataRequired()])
769 submit = wtforms.SubmitField(_('Setup webhooks'))
770 ignore_labels = ['submit']
771 css_classes = {'submit': 'btn btn-primary'}
842 772
843 773
844 774 class IntegrationJiraForm(ReactorForm):
845 775 host_name = wtforms.StringField(
846 "Server URL",
776 'Server URL',
847 777 filters=[strip_filter],
848 validators=[wtforms.validators.DataRequired()],
849 )
778 validators=[wtforms.validators.DataRequired()])
850 779 user_name = wtforms.StringField(
851 "Username",
780 'Username',
852 781 filters=[strip_filter],
853 validators=[wtforms.validators.DataRequired()],
854 )
782 validators=[wtforms.validators.DataRequired()])
855 783 password = wtforms.PasswordField(
856 "Password",
784 'Password',
857 785 filters=[strip_filter],
858 validators=[wtforms.validators.DataRequired()],
859 )
786 validators=[wtforms.validators.DataRequired()])
860 787 project = wtforms.StringField(
861 "Project key",
788 'Project key',
862 789 filters=[uppercase_filter, strip_filter],
863 validators=[wtforms.validators.DataRequired()],
864 )
790 validators=[wtforms.validators.DataRequired()])
865 791
866 792 def validate_project(self, field):
867 793 if not field.data:
868 794 return
869 795 try:
870 client = JiraClient(
871 self.user_name.data,
872 self.password.data,
873 self.host_name.data,
874 self.project.data,
875 )
796 client = JiraClient(self.user_name.data,
797 self.password.data,
798 self.host_name.data,
799 self.project.data)
876 800 except Exception as exc:
877 801 raise wtforms.validators.ValidationError(str(exc))
878 802
879 803 project_list = [r.key.upper() for r in client.get_projects()]
880 804 if field.data.upper() not in project_list:
881 805 msg = "Project %s doesn't exist in your Jira instance"
882 806 raise wtforms.validators.ValidationError(msg % field.data)
883 807
884 808
885 809 def get_deletion_form(resource):
886 810 class F(ReactorForm):
887 811 application_name = wtforms.StringField(
888 "Application Name",
812 'Application Name',
889 813 filters=[strip_filter],
890 validators=[wtforms.validators.AnyOf([resource.resource_name])],
891 )
814 validators=[wtforms.validators.AnyOf([resource.resource_name])])
892 815 resource_id = wtforms.HiddenField(default=resource.resource_id)
893 submit = wtforms.SubmitField(_("Delete my application"))
894 ignore_labels = ["submit"]
895 css_classes = {"submit": "btn btn-danger"}
816 submit = wtforms.SubmitField(_('Delete my application'))
817 ignore_labels = ['submit']
818 css_classes = {'submit': 'btn btn-danger'}
896 819
897 820 return F
898 821
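For context, the factory above yields a per-resource confirmation form. A minimal usage sketch, assuming `resource` is the application being deleted and that ReactorForm accepts a `csrf_context` argument the way WTForms' SecureForm does (neither is shown in this hunk):

    DeleteForm = get_deletion_form(resource)
    form = DeleteForm(request.POST, csrf_context=request.session)
    if request.method == "POST" and form.validate():
        # validates only when the typed application_name matches
        # resource.resource_name exactly (the AnyOf validator above)
        ...  # proceed with the actual deletion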
899 822
900 823 class ChangeApplicationOwnerForm(ReactorForm):
901 824 password = wtforms.PasswordField(
902 "Password",
825 'Password',
903 826 filters=[strip_filter],
904 validators=[old_password_validator, wtforms.validators.DataRequired()],
905 )
827 validators=[old_password_validator,
828 wtforms.validators.DataRequired()])
906 829
907 830 user_name = wtforms.StringField(
908 "New owners username",
831 'New owners username',
909 832 filters=[strip_filter],
910 validators=[found_username_validator, wtforms.validators.DataRequired()],
911 )
912 submit = wtforms.SubmitField(_("Transfer ownership of application"))
913 ignore_labels = ["submit"]
914 css_classes = {"submit": "btn btn-danger"}
833 validators=[found_username_validator,
834 wtforms.validators.DataRequired()])
835 submit = wtforms.SubmitField(_('Transfer ownership of application'))
836 ignore_labels = ['submit']
837 css_classes = {'submit': 'btn btn-danger'}
915 838
916 839
917 840 def default_filename():
918 return "Invoice %s" % datetime.datetime.utcnow().strftime("%Y/%m")
841 return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m')
919 842
920 843
921 844 class FileUploadForm(ReactorForm):
922 title = wtforms.StringField(
923 "File Title",
924 default=default_filename,
925 validators=[wtforms.validators.DataRequired()],
926 )
927 file = wtforms.FileField("File")
845 title = wtforms.StringField('File Title',
846 default=default_filename,
847 validators=[wtforms.validators.DataRequired()])
848 file = wtforms.FileField('File')
928 849
929 850 def validate_file(self, field):
930 if not hasattr(field.data, "file"):
931 raise wtforms.ValidationError("File is missing")
851 if not hasattr(field.data, 'file'):
852 raise wtforms.ValidationError('File is missing')
932 853
933 submit = wtforms.SubmitField(_("Upload"))
854 submit = wtforms.SubmitField(_('Upload'))
934 855
935 856
936 857 def get_partition_deletion_form(es_indices, pg_indices):
937 858 class F(ReactorForm):
938 es_index = wtforms.SelectMultipleField(
939 "Elasticsearch", choices=[(ix, "") for ix in es_indices]
940 )
941 pg_index = wtforms.SelectMultipleField(
942 "pg", choices=[(ix, "") for ix in pg_indices]
943 )
944 confirm = wtforms.TextField(
945 "Confirm",
946 filters=[uppercase_filter, strip_filter],
947 validators=[
948 wtforms.validators.AnyOf(["CONFIRM"]),
949 wtforms.validators.DataRequired(),
950 ],
951 )
952 ignore_labels = ["submit"]
953 css_classes = {"submit": "btn btn-danger"}
859 es_index = wtforms.SelectMultipleField('Elasticsearch',
860 choices=[(ix, '') for ix in
861 es_indices])
862 pg_index = wtforms.SelectMultipleField('pg',
863 choices=[(ix, '') for ix in
864 pg_indices])
865 confirm = wtforms.TextField('Confirm',
866 filters=[uppercase_filter, strip_filter],
867 validators=[
868 wtforms.validators.AnyOf(['CONFIRM']),
869 wtforms.validators.DataRequired()])
870 ignore_labels = ['submit']
871 css_classes = {'submit': 'btn btn-danger'}
954 872
955 873 return F
956 874
957 875
958 876 class GroupCreateForm(ReactorForm):
959 877 group_name = wtforms.StringField(
960 _("Group Name"),
878 _('Group Name'),
961 879 filters=[strip_filter],
962 880 validators=[
963 881 wtforms.validators.Length(min=2, max=50),
964 882 unique_groupname_validator,
965 wtforms.validators.DataRequired(),
966 ],
967 )
968 description = wtforms.StringField(_("Group description"))
883 wtforms.validators.DataRequired()
884 ])
885 description = wtforms.StringField(_('Group description'))
969 886
970 887
971 time_choices = [(k, v["label"]) for k, v in h.time_deltas.items()]
888 time_choices = [(k, v['label'],) for k, v in h.time_deltas.items()]
972 889
973 890
974 891 class AuthTokenCreateForm(ReactorForm):
975 description = wtforms.StringField(_("Token description"))
976 expires = wtforms.SelectField(
977 "Expires",
978 coerce=lambda x: x,
979 choices=time_choices,
980 validators=[wtforms.validators.Optional()],
981 )
892 description = wtforms.StringField(_('Token description'))
893 expires = wtforms.SelectField('Expires',
894 coerce=lambda x: x,
895 choices=time_choices,
896 validators=[wtforms.validators.Optional()])
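These integration forms all follow one pattern: declarative fields plus `validate_<field>` hooks that exercise the external service. A hedged sketch of driving one from a Pyramid view (the route name and the `csrf_context` handling are assumptions, not part of this diff):

    from pyramid.view import view_config

    @view_config(route_name="integration_slack", renderer="json")
    def slack_integration_view(request):
        form = IntegrationSlackForm(request.POST, csrf_context=request.session)
        if request.method == "POST" and form.validate():
            # validate_webhook_url() has already posted a test payload
            return {"webhook_url": form.webhook_url.data}
        return {"errors": form.errors}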
@@ -1,49 +1,50 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 """Miscellaneous support packages for {{project}}.
18 18 """
19 19 import random
20 20 import string
21 21 import importlib
22 22
23 23 from appenlight_client.exceptions import get_current_traceback
24 24
25 25
26 26 def generate_random_string(chars=10):
27 return "".join(random.sample(string.ascii_letters * 2 + string.digits, chars))
27 return ''.join(random.sample(string.ascii_letters * 2 + string.digits,
28 chars))
28 29
29 30
30 31 def to_integer_safe(input):
31 32 try:
32 33 return int(input)
33 except (TypeError, ValueError):
34 except (TypeError, ValueError,):
34 35 return None
35 36
36 37
37 38 def print_traceback(log):
38 traceback = get_current_traceback(
39 skip=1, show_hidden_frames=True, ignore_system_exceptions=True
40 )
39 traceback = get_current_traceback(skip=1, show_hidden_frames=True,
40 ignore_system_exceptions=True)
41 41 exception_text = traceback.exception
42 42 log.error(exception_text)
43 43 log.error(traceback.plaintext)
44 44 del traceback
45 45
46 46
47 47 def get_callable(import_string):
48 import_module, indexer_callable = import_string.split(":")
49 return getattr(importlib.import_module(import_module), indexer_callable)
48 import_module, indexer_callable = import_string.split(':')
49 return getattr(importlib.import_module(import_module),
50 indexer_callable)
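`get_callable` resolves a `"module:attribute"` import string, which lets configuration point at arbitrary callables. An illustrative call (the dotted path is made up):

    indexer = get_callable("appenlight_example_plugin.indexer:run")
    # equivalent to: from appenlight_example_plugin.indexer import run
    indexer()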
@@ -1,80 +1,81 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import datetime
18 18 import logging
19 19
20 20 from pyramid.httpexceptions import HTTPForbidden, HTTPTooManyRequests
21 21
22 from appenlight.models import Datastores
22 23 from appenlight.models.services.config import ConfigService
23 24 from appenlight.lib.redis_keys import REDIS_KEYS
24 25
25 26 log = logging.getLogger(__name__)
26 27
27 28
28 29 def rate_limiting(request, resource, section, to_increment=1):
29 30 tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
30 key = REDIS_KEYS["rate_limits"][section].format(tsample, resource.resource_id)
31 key = REDIS_KEYS['rate_limits'][section].format(tsample,
32 resource.resource_id)
31 33 redis_pipeline = request.registry.redis_conn.pipeline()
32 34 redis_pipeline.incr(key, to_increment)
33 35 redis_pipeline.expire(key, 3600 * 24)
34 36 results = redis_pipeline.execute()
35 37 current_count = results[0]
36 config = ConfigService.by_key_and_section(section, "global")
38 config = ConfigService.by_key_and_section(section, 'global')
37 39 limit = config.value if config else 1000
38 40 if current_count > int(limit):
39 log.info("RATE LIMITING: {}: {}, {}".format(section, resource, current_count))
40 abort_msg = "Rate limits are in effect for this application"
41 raise HTTPTooManyRequests(abort_msg, headers={"X-AppEnlight": abort_msg})
41 log.info('RATE LIMITING: {}: {}, {}'.format(
42 section, resource, current_count))
43 abort_msg = 'Rate limits are in effect for this application'
44 raise HTTPTooManyRequests(abort_msg,
45 headers={'X-AppEnlight': abort_msg})
42 46
43 47
44 48 def check_cors(request, application, should_return=True):
45 49 """
46 50 Checks whether the request comes from a domain authorized for the
47 51 application; returns 403 otherwise
48 52 """
49 53 origin_found = False
50 origin = request.headers.get("Origin")
54 origin = request.headers.get('Origin')
51 55 if should_return:
52 log.info("CORS for %s" % origin)
56 log.info('CORS for %s' % origin)
53 57 if not origin:
54 58 return False
55 for domain in application.domains.split("\n"):
59 for domain in application.domains.split('\n'):
56 60 if domain in origin:
57 61 origin_found = True
58 62 if origin_found:
59 request.response.headers.add("Access-Control-Allow-Origin", origin)
60 request.response.headers.add("XDomainRequestAllowed", "1")
61 request.response.headers.add(
62 "Access-Control-Allow-Methods", "GET, POST, OPTIONS"
63 )
64 request.response.headers.add(
65 "Access-Control-Allow-Headers",
66 "Accept-Encoding, Accept-Language, "
67 "Content-Type, "
68 "Depth, User-Agent, X-File-Size, "
69 "X-Requested-With, If-Modified-Since, "
70 "X-File-Name, "
71 "Cache-Control, Host, Pragma, Accept, "
72 "Origin, Connection, "
73 "Referer, Cookie, "
74 "X-appenlight-public-api-key, "
75 "x-appenlight-public-api-key",
76 )
77 request.response.headers.add("Access-Control-Max-Age", "86400")
63 request.response.headers.add('Access-Control-Allow-Origin', origin)
64 request.response.headers.add('XDomainRequestAllowed', '1')
65 request.response.headers.add('Access-Control-Allow-Methods',
66 'GET, POST, OPTIONS')
67 request.response.headers.add('Access-Control-Allow-Headers',
68 'Accept-Encoding, Accept-Language, '
69 'Content-Type, '
70 'Depth, User-Agent, X-File-Size, '
71 'X-Requested-With, If-Modified-Since, '
72 'X-File-Name, '
73 'Cache-Control, Host, Pragma, Accept, '
74 'Origin, Connection, '
75 'Referer, Cookie, '
76 'X-appenlight-public-api-key, '
77 'x-appenlight-public-api-key')
78 request.response.headers.add('Access-Control-Max-Age', '86400')
78 79 return request.response
79 80 else:
80 81 return HTTPForbidden()
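A sketch of how `rate_limiting` guards an ingest endpoint; the section name matches a key under REDIS_KEYS["rate_limits"], while the surrounding view code is assumed:

    def receive_reports(request, application):
        # raises HTTPTooManyRequests once this application's
        # per-minute counter exceeds the configured limit
        rate_limiting(request, application,
                      "per_application_reports_rate_limit",
                      to_increment=len(request.json_body))
        ...  # process the payload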
@@ -1,169 +1,183 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import copy
18 18 import hashlib
19 19 import inspect
20 20
21 from dogpile.cache import make_region
22 from dogpile.cache.util import compat
21 from dogpile.cache import make_region, compat
23 22
24 23 regions = None
25 24
26 25
27 26 def key_mangler(key):
28 27 return "appenlight:dogpile:{}".format(key)
29 28
30 29
31 30 def hashgen(namespace, fn, to_str=compat.string_type):
32 31 """Return a function that generates a string
33 32 key, based on a given function as well as
34 33 arguments to the returned function itself.
35 34
36 35 This is used by :meth:`.CacheRegion.cache_on_arguments`
37 36 to generate a cache key from a decorated function.
38 37
39 38 It can be replaced using the ``function_key_generator``
40 39 argument passed to :func:`.make_region`.
41 40
42 41 """
43 42
44 43 if namespace is None:
45 namespace = "%s:%s" % (fn.__module__, fn.__name__)
44 namespace = '%s:%s' % (fn.__module__, fn.__name__)
46 45 else:
47 namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)
46 namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
48 47
49 48 args = inspect.getargspec(fn)
50 has_self = args[0] and args[0][0] in ("self", "cls")
49 has_self = args[0] and args[0][0] in ('self', 'cls')
51 50
52 51 def generate_key(*args, **kw):
53 52 if kw:
54 53 raise ValueError(
55 54 "dogpile.cache's default key creation "
56 "function does not accept keyword arguments."
57 )
55 "function does not accept keyword arguments.")
58 56 if has_self:
59 57 args = args[1:]
60 58
61 return (
62 namespace
63 + "|"
64 + hashlib.sha1(" ".join(map(to_str, args)).encode("utf8")).hexdigest()
65 )
59 return namespace + "|" + hashlib.sha1(
60 " ".join(map(to_str, args)).encode('utf8')).hexdigest()
66 61
67 62 return generate_key
68 63
69 64
70 65 class CacheRegions(object):
71 66 def __init__(self, settings):
72 67 config_redis = {"arguments": settings}
73 68
74 69 self.redis_min_1 = make_region(
75 function_key_generator=hashgen, key_mangler=key_mangler
76 ).configure(
77 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
78 )
70 function_key_generator=hashgen,
71 key_mangler=key_mangler).configure(
72 "dogpile.cache.redis",
73 expiration_time=60,
74 **copy.deepcopy(config_redis))
79 75 self.redis_min_5 = make_region(
80 function_key_generator=hashgen, key_mangler=key_mangler
81 ).configure(
82 "dogpile.cache.redis", expiration_time=300, **copy.deepcopy(config_redis)
83 )
76 function_key_generator=hashgen,
77 key_mangler=key_mangler).configure(
78 "dogpile.cache.redis",
79 expiration_time=300,
80 **copy.deepcopy(config_redis))
84 81
85 82 self.redis_min_10 = make_region(
86 function_key_generator=hashgen, key_mangler=key_mangler
87 ).configure(
88 "dogpile.cache.redis", expiration_time=60, **copy.deepcopy(config_redis)
89 )
83 function_key_generator=hashgen,
84 key_mangler=key_mangler).configure(
85 "dogpile.cache.redis",
86 expiration_time=60,
87 **copy.deepcopy(config_redis))
90 88
91 89 self.redis_min_60 = make_region(
92 function_key_generator=hashgen, key_mangler=key_mangler
93 ).configure(
94 "dogpile.cache.redis", expiration_time=3600, **copy.deepcopy(config_redis)
95 )
90 function_key_generator=hashgen,
91 key_mangler=key_mangler).configure(
92 "dogpile.cache.redis",
93 expiration_time=3600,
94 **copy.deepcopy(config_redis))
96 95
97 96 self.redis_sec_1 = make_region(
98 function_key_generator=hashgen, key_mangler=key_mangler
99 ).configure(
100 "dogpile.cache.redis", expiration_time=1, **copy.deepcopy(config_redis)
101 )
97 function_key_generator=hashgen,
98 key_mangler=key_mangler).configure(
99 "dogpile.cache.redis",
100 expiration_time=1,
101 **copy.deepcopy(config_redis))
102 102
103 103 self.redis_sec_5 = make_region(
104 function_key_generator=hashgen, key_mangler=key_mangler
105 ).configure(
106 "dogpile.cache.redis", expiration_time=5, **copy.deepcopy(config_redis)
107 )
104 function_key_generator=hashgen,
105 key_mangler=key_mangler).configure(
106 "dogpile.cache.redis",
107 expiration_time=5,
108 **copy.deepcopy(config_redis))
108 109
109 110 self.redis_sec_30 = make_region(
110 function_key_generator=hashgen, key_mangler=key_mangler
111 ).configure(
112 "dogpile.cache.redis", expiration_time=30, **copy.deepcopy(config_redis)
113 )
111 function_key_generator=hashgen,
112 key_mangler=key_mangler).configure(
113 "dogpile.cache.redis",
114 expiration_time=30,
115 **copy.deepcopy(config_redis))
114 116
115 117 self.redis_day_1 = make_region(
116 function_key_generator=hashgen, key_mangler=key_mangler
117 ).configure(
118 "dogpile.cache.redis", expiration_time=86400, **copy.deepcopy(config_redis)
119 )
118 function_key_generator=hashgen,
119 key_mangler=key_mangler).configure(
120 "dogpile.cache.redis",
121 expiration_time=86400,
122 **copy.deepcopy(config_redis))
120 123
121 124 self.redis_day_7 = make_region(
122 function_key_generator=hashgen, key_mangler=key_mangler
123 ).configure(
125 function_key_generator=hashgen,
126 key_mangler=key_mangler).configure(
124 127 "dogpile.cache.redis",
125 128 expiration_time=86400 * 7,
126 **copy.deepcopy(config_redis)
127 )
129 **copy.deepcopy(config_redis))
128 130
129 131 self.redis_day_30 = make_region(
130 function_key_generator=hashgen, key_mangler=key_mangler
131 ).configure(
132 function_key_generator=hashgen,
133 key_mangler=key_mangler).configure(
132 134 "dogpile.cache.redis",
133 135 expiration_time=86400 * 30,
134 **copy.deepcopy(config_redis)
135 )
136 **copy.deepcopy(config_redis))
136 137
137 138 self.memory_day_1 = make_region(
138 function_key_generator=hashgen, key_mangler=key_mangler
139 ).configure(
140 "dogpile.cache.memory", expiration_time=86400, **copy.deepcopy(config_redis)
141 )
139 function_key_generator=hashgen,
140 key_mangler=key_mangler).configure(
141 "dogpile.cache.memory",
142 expiration_time=86400,
143 **copy.deepcopy(config_redis))
142 144
143 145 self.memory_sec_1 = make_region(
144 function_key_generator=hashgen, key_mangler=key_mangler
145 ).configure("dogpile.cache.memory", expiration_time=1)
146 function_key_generator=hashgen,
147 key_mangler=key_mangler).configure(
148 "dogpile.cache.memory",
149 expiration_time=1)
146 150
147 151 self.memory_sec_5 = make_region(
148 function_key_generator=hashgen, key_mangler=key_mangler
149 ).configure("dogpile.cache.memory", expiration_time=5)
152 function_key_generator=hashgen,
153 key_mangler=key_mangler).configure(
154 "dogpile.cache.memory",
155 expiration_time=5)
150 156
151 157 self.memory_min_1 = make_region(
152 function_key_generator=hashgen, key_mangler=key_mangler
153 ).configure("dogpile.cache.memory", expiration_time=60)
158 function_key_generator=hashgen,
159 key_mangler=key_mangler).configure(
160 "dogpile.cache.memory",
161 expiration_time=60)
154 162
155 163 self.memory_min_5 = make_region(
156 function_key_generator=hashgen, key_mangler=key_mangler
157 ).configure("dogpile.cache.memory", expiration_time=300)
164 function_key_generator=hashgen,
165 key_mangler=key_mangler).configure(
166 "dogpile.cache.memory",
167 expiration_time=300)
158 168
159 169 self.memory_min_10 = make_region(
160 function_key_generator=hashgen, key_mangler=key_mangler
161 ).configure("dogpile.cache.memory", expiration_time=600)
170 function_key_generator=hashgen,
171 key_mangler=key_mangler).configure(
172 "dogpile.cache.memory",
173 expiration_time=600)
162 174
163 175 self.memory_min_60 = make_region(
164 function_key_generator=hashgen, key_mangler=key_mangler
165 ).configure("dogpile.cache.memory", expiration_time=3600)
176 function_key_generator=hashgen,
177 key_mangler=key_mangler).configure(
178 "dogpile.cache.memory",
179 expiration_time=3600)
166 180
167 181
168 182 def get_region(region):
169 183 return getattr(regions, region)
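The regions above are consumed through dogpile's decorator API. A minimal sketch, assuming the module-level `regions` was set to `CacheRegions(settings)` during application startup:

    region = get_region("redis_min_5")

    @region.cache_on_arguments(namespace="report_stats")
    def report_count_for(resource_id):
        # recomputed at most once every 300 s per resource_id; cache
        # keys pass through hashgen() and key_mangler() defined above
        return expensive_count_query(resource_id)  # hypothetical query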
@@ -1,92 +1,92 b''
1 1 import inspect
2 2 import logging
3 3
4 4 from pyramid.config import Configurator
5 5
6 6 log = logging.getLogger(__name__)
7 7
8
9 8 class InspectProxy(object):
10 9 """
11 10 Proxy to the `inspect` module that allows us to use the pyramid include
12 11 mechanism for cythonized modules without source file.
13 12 """
14 13
15 14 def _get_cyfunction_func_code(self, cyfunction):
16 15 """
17 16 Unpack the `func_code` attribute of a cython function.
18 17 """
19 18 if inspect.ismethod(cyfunction):
20 19 cyfunction = cyfunction.im_func
21 return getattr(cyfunction, "func_code")
20 return getattr(cyfunction, 'func_code')
22 21
23 22 def getmodule(self, *args, **kwds):
24 23 """
25 24 Simple proxy to `inspect.getmodule`.
26 25 """
27 26 return inspect.getmodule(*args, **kwds)
28 27
29 28 def getsourcefile(self, obj):
30 29 """
31 30 Proxy to `inspect.getsourcefile` or `inspect.getfile` depending on if
32 31 it's called to look up the source file that contains the magic pyramid
33 32 `includeme` callable.
34 33
35 34 For cythonized modules the source file may be deleted. Therefore we
36 35 return the result of `inspect.getfile` instead. In the case of the
37 36 `configurator.include` method this is OK, because the result is passed
38 37 to `os.path.dirname` which strips the file name. So it doesn't matter
39 38 if we return the path to the source file or another file in the same
40 39 directory.
41 40 """
42 41 # Check if it's called to look up the source file that contains the
43 42 # magic pyramid `includeme` callable.
44 if getattr(obj, "__name__") == "includeme":
43 if getattr(obj, '__name__') == 'includeme':
45 44 try:
46 45 return inspect.getfile(obj)
47 46 except TypeError as e:
48 47 # Cython functions are not recognized as functions by the
49 48 # inspect module. We have to unpack the func_code attribute
50 49 # ourself.
51 if "cyfunction" in e.message:
50 if 'cyfunction' in e.message:
52 51 obj = self._get_cyfunction_func_code(obj)
53 52 return inspect.getfile(obj)
54 53 raise
55 54 else:
56 55 return inspect.getsourcefile(obj)
57 56
58 57
59 58 class CythonCompatConfigurator(Configurator):
60 59 """
61 60 Customized configurator to replace the inspect class attribute with
62 61 a custom one that is cython compatible.
63 62 """
64
65 63 inspect = InspectProxy()
66 64
67 65
68 66 def register_appenlight_plugin(config, plugin_name, plugin_config):
69 67 def register():
70 log.warning("Registering plugin: {}".format(plugin_name))
68 log.warning('Registering plugin: {}'.format(plugin_name))
71 69 if plugin_name not in config.registry.appenlight_plugins:
72 70 config.registry.appenlight_plugins[plugin_name] = {
73 "javascript": None,
74 "static": None,
75 "css": None,
76 "celery_tasks": None,
77 "celery_beats": None,
78 "fulltext_indexer": None,
79 "sqlalchemy_migrations": None,
80 "default_values_setter": None,
81 "header_html": None,
82 "resource_types": [],
83 "url_gen": None,
71 'javascript': None,
72 'static': None,
73 'css': None,
74 'celery_tasks': None,
75 'celery_beats': None,
76 'fulltext_indexer': None,
77 'sqlalchemy_migrations': None,
78 'default_values_setter': None,
79 'header_html': None,
80 'resource_types': [],
81 'url_gen': None
84 82 }
85 config.registry.appenlight_plugins[plugin_name].update(plugin_config)
83 config.registry.appenlight_plugins[plugin_name].update(
84 plugin_config)
86 85 # inform AE what kind of resource types we have available
87 86 # so we can avoid failing when a plugin is removed but data
88 87 # is still present in the db
89 if plugin_config.get("resource_types"):
90 config.registry.resource_types.extend(plugin_config["resource_types"])
88 if plugin_config.get('resource_types'):
89 config.registry.resource_types.extend(
90 plugin_config['resource_types'])
91 91
92 config.action("appenlight_plugin={}".format(plugin_name), register)
92 config.action('appenlight_plugin={}'.format(plugin_name), register)
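A hypothetical plugin `includeme` showing the expected shape of the config dictionary; the plugin name, asset path, and the assumption that `register_appenlight_plugin` is exposed as a config directive are all illustrative:

    def includeme(config):
        config.register_appenlight_plugin(
            "appenlight_example_plugin",
            {
                "javascript": "appenlight_example_plugin:static/plugin.js",
                "resource_types": ["example_resource"],
            },
        )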
@@ -1,58 +1,58 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 # this gets set on runtime
18 18 from cryptography.fernet import Fernet
19 19
20 20 ENCRYPTION_SECRET = None
21 21
22 22
23 23 def encrypt_fernet(value):
24 24 # avoid double encryption
25 25 # not sure if this is needed but it won't hurt too much to have this
26 if value.startswith("enc$fernet$"):
26 if value.startswith('enc$fernet$'):
27 27 return value
28 28 f = Fernet(ENCRYPTION_SECRET)
29 return "enc$fernet${}".format(f.encrypt(value.encode("utf8")).decode("utf8"))
29 return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8'))
30 30
31 31
32 32 def decrypt_fernet(value):
33 parts = value.split("$", 3)
33 parts = value.split('$', 3)
34 34 if not len(parts) == 3:
35 35 # not encrypted values
36 36 return value
37 37 else:
38 38 f = Fernet(ENCRYPTION_SECRET)
39 decrypted_data = f.decrypt(parts[2].encode("utf8")).decode("utf8")
39 decrypted_data = f.decrypt(parts[2].encode('utf8')).decode('utf8')
40 40 return decrypted_data
41 41
42 42
43 43 def encrypt_dictionary_keys(_dict, exclude_keys=None):
44 44 if not exclude_keys:
45 45 exclude_keys = []
46 46 keys = [k for k in _dict.keys() if k not in exclude_keys]
47 47 for k in keys:
48 48 _dict[k] = encrypt_fernet(_dict[k])
49 49 return _dict
50 50
51 51
52 52 def decrypt_dictionary_keys(_dict, exclude_keys=None):
53 53 if not exclude_keys:
54 54 exclude_keys = []
55 55 keys = [k for k in _dict.keys() if k not in exclude_keys]
56 56 for k in keys:
57 57 _dict[k] = decrypt_fernet(_dict[k])
58 58 return _dict
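Round-tripping a value through these helpers looks like the following; it assumes the module is importable as `appenlight.lib.encryption` and that `ENCRYPTION_SECRET` is set to a Fernet key at startup:

    from cryptography.fernet import Fernet
    import appenlight.lib.encryption as encryption

    encryption.ENCRYPTION_SECRET = Fernet.generate_key()
    token = encryption.encrypt_fernet("super-secret")  # "enc$fernet$..."
    assert encryption.decrypt_fernet(token) == "super-secret"
    # already-encrypted input passes through unchanged, so double
    # encryption is avoided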
@@ -1,90 +1,88 b''
1 1 import collections
2
3 2 # -*- coding: utf-8 -*-
4 3
5 4 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
6 5 #
7 6 # Licensed under the Apache License, Version 2.0 (the "License");
8 7 # you may not use this file except in compliance with the License.
9 8 # You may obtain a copy of the License at
10 9 #
11 10 # http://www.apache.org/licenses/LICENSE-2.0
12 11 #
13 12 # Unless required by applicable law or agreed to in writing, software
14 13 # distributed under the License is distributed on an "AS IS" BASIS,
15 14 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 15 # See the License for the specific language governing permissions and
17 16 # limitations under the License.
18 17
19 18
20 19 class StupidEnum(object):
21 20 @classmethod
22 21 def set_inverse(cls):
23 22 cls._inverse_values = dict(
24 (y, x)
25 for x, y in vars(cls).items()
26 if not x.startswith("_") and not callable(y)
23 (y, x) for x, y in vars(cls).items() if
24 not x.startswith('_') and not callable(y)
27 25 )
28 26
29 27 @classmethod
30 28 def key_from_value(cls, value):
31 if not hasattr(cls, "_inverse_values"):
29 if not hasattr(cls, '_inverse_values'):
32 30 cls.set_inverse()
33 31 return cls._inverse_values.get(value)
34 32
35 33
36 34 class ReportType(StupidEnum):
37 35 unknown = 0
38 36 error = 1
39 37 not_found = 2
40 38 slow = 3
41 39
42 40
43 41 class Language(StupidEnum):
44 42 unknown = 0
45 43 python = 1
46 44 javascript = 2
47 45 java = 3
48 46 objectivec = 4
49 47 swift = 5
50 48 cpp = 6
51 49 basic = 7
52 50 csharp = 8
53 51 php = 9
54 52 perl = 10
55 53 vb = 11
56 54 vbnet = 12
57 55 ruby = 13
58 56 fsharp = 14
59 57 actionscript = 15
60 58 go = 16
61 59 scala = 17
62 60 haskell = 18
63 61 erlang = 19
64 62 haxe = 20
65 63 scheme = 21
66 64
67 65
68 66 class LogLevel(StupidEnum):
69 67 UNKNOWN = 0
70 68 DEBUG = 2
71 69 TRACE = 4
72 70 INFO = 6
73 71 WARNING = 8
74 72 ERROR = 10
75 73 CRITICAL = 12
76 74 FATAL = 14
77 75
78 76
79 77 class LogLevelPython(StupidEnum):
80 78 CRITICAL = 50
81 79 ERROR = 40
82 80 WARNING = 30
83 81 INFO = 20
84 82 DEBUG = 10
85 83 NOTSET = 0
86 84
87 85
88 86 class ParsedSentryEventType(StupidEnum):
89 87 ERROR_REPORT = 1
90 88 LOG = 2
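`key_from_value` builds its inverse mapping lazily on first use, so reverse lookups stay cheap; for example:

    ReportType.key_from_value(3)    # -> "slow"
    LogLevel.key_from_value(10)     # -> "ERROR"
    Language.key_from_value(999)    # -> None for unknown values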
@@ -1,143 +1,148 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 """
18 18 ex-json borrowed from Marcin Kuzminski
19 19
20 20 source: https://secure.rhodecode.org/ext-json
21 21
22 22 """
23 23 import datetime
24 24 import functools
25 25 import decimal
26 26 import imp
27 27
28 __all__ = ["json", "simplejson", "stdlibjson"]
28 __all__ = ['json', 'simplejson', 'stdlibjson']
29 29
30 30
31 31 def _is_aware(value):
32 32 """
33 33 Determines if a given datetime.time is aware.
34 34
35 35 The logic is described in Python's docs:
36 36 http://docs.python.org/library/datetime.html#datetime.tzinfo
37 37 """
38 return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None
38 return (value.tzinfo is not None
39 and value.tzinfo.utcoffset(value) is not None)
39 40
40 41
41 42 def _obj_dump(obj):
42 43 """
43 44 Custom function for dumping objects to JSON, if obj has __json__ attribute
44 45 or method defined it will be used for serialization
45 46
46 47 :param obj:
47 48 """
48 49
49 50 if isinstance(obj, complex):
50 51 return [obj.real, obj.imag]
51 52 # See "Date Time String Format" in the ECMA-262 specification.
52 53 # some code borrowed from django 1.4
53 54 elif isinstance(obj, datetime.datetime):
54 55 r = obj.isoformat()
55 56 # if obj.microsecond:
56 57 # r = r[:23] + r[26:]
57 if r.endswith("+00:00"):
58 r = r[:-6] + "Z"
58 if r.endswith('+00:00'):
59 r = r[:-6] + 'Z'
59 60 return r
60 61 elif isinstance(obj, datetime.date):
61 62 return obj.isoformat()
62 63 elif isinstance(obj, decimal.Decimal):
63 64 return str(obj)
64 65 elif isinstance(obj, datetime.time):
65 66 if _is_aware(obj):
66 67 raise ValueError("JSON can't represent timezone-aware times.")
67 68 r = obj.isoformat()
68 69 if obj.microsecond:
69 70 r = r[:12]
70 71 return r
71 72 elif isinstance(obj, set):
72 73 return list(obj)
73 elif hasattr(obj, "__json__"):
74 elif hasattr(obj, '__json__'):
74 75 if callable(obj.__json__):
75 76 return obj.__json__()
76 77 else:
77 78 return obj.__json__
78 79 else:
79 80 raise NotImplementedError
80 81
81 82
82 83 # Import simplejson
83 84 try:
84 85 # import simplejson initially
85 _sj = imp.load_module("_sj", *imp.find_module("simplejson"))
86 _sj = imp.load_module('_sj', *imp.find_module('simplejson'))
87
86 88
87 89 def extended_encode(obj):
88 90 try:
89 91 return _obj_dump(obj)
90 92 except NotImplementedError:
91 93 pass
92 94 raise TypeError("%r is not JSON serializable" % (obj,))
93 95
96
94 97 # we handle decimals our own it makes unified behavior of json vs
95 98 # simplejson
96 sj_version = [int(x) for x in _sj.__version__.split(".")]
99 sj_version = [int(x) for x in _sj.__version__.split('.')]
97 100 major, minor = sj_version[0], sj_version[1]
98 101 if major < 2 or (major == 2 and minor < 1):
99 102 # simplejson < 2.1 doesn't support use_decimal
100 _sj.dumps = functools.partial(_sj.dumps, default=extended_encode)
101 _sj.dump = functools.partial(_sj.dump, default=extended_encode)
103 _sj.dumps = functools.partial(
104 _sj.dumps, default=extended_encode)
105 _sj.dump = functools.partial(
106 _sj.dump, default=extended_encode)
102 107 else:
103 108 _sj.dumps = functools.partial(
104 _sj.dumps, default=extended_encode, use_decimal=False
105 )
109 _sj.dumps, default=extended_encode, use_decimal=False)
106 110 _sj.dump = functools.partial(
107 _sj.dump, default=extended_encode, use_decimal=False
108 )
111 _sj.dump, default=extended_encode, use_decimal=False)
109 112 simplejson = _sj
110 113
111 114 except ImportError:
112 115 # no simplejson set it to None
113 116 simplejson = None
114 117
115 118 try:
116 119 # simplejson not found try out regular json module
117 _json = imp.load_module("_json", *imp.find_module("json"))
120 _json = imp.load_module('_json', *imp.find_module('json'))
121
118 122
119 123 # extended JSON encoder for json
120 124 class ExtendedEncoder(_json.JSONEncoder):
121 125 def default(self, obj):
122 126 try:
123 127 return _obj_dump(obj)
124 128 except NotImplementedError:
125 129 pass
126 130 raise TypeError("%r is not JSON serializable" % (obj,))
127 131
132
128 133 # monkey-patch JSON encoder to use extended version
129 134 _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
130 135 _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)
131 136
132 137 except ImportError:
133 138 json = None
134 139
135 140 stdlibjson = _json
136 141
137 142 # set all available json modules
138 143 if simplejson:
139 144 json = _sj
140 145 elif _json:
141 146 json = _json
142 147 else:
143 raise ImportError("Could not find any json modules")
148 raise ImportError('Could not find any json modules')
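Whichever backend wins, any object exposing a `__json__` attribute or method serializes through `_obj_dump`; for example:

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __json__(self):
            return {"x": self.x, "y": self.y}

    json.dumps({"p": Point(1, 2)})  # '{"p": {"x": 1, "y": 2}}'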
@@ -1,160 +1,119 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 """
18 18 Helper functions
19 19 """
20 20 import copy
21 21 import datetime
22 22
23 23 from collections import namedtuple, OrderedDict
24 24
25 25 _ = lambda x: x
26 26
27 27 time_deltas = OrderedDict()
28 28
29 time_deltas["1m"] = {
30 "delta": datetime.timedelta(minutes=1),
31 "label": "1 minute",
32 "minutes": 1,
33 }
34
35 time_deltas["5m"] = {
36 "delta": datetime.timedelta(minutes=5),
37 "label": "5 minutes",
38 "minutes": 5,
39 }
40 time_deltas["30m"] = {
41 "delta": datetime.timedelta(minutes=30),
42 "label": "30 minutes",
43 "minutes": 30,
44 }
45 time_deltas["1h"] = {
46 "delta": datetime.timedelta(hours=1),
47 "label": "60 minutes",
48 "minutes": 60,
49 }
50 time_deltas["4h"] = {
51 "delta": datetime.timedelta(hours=4),
52 "label": "4 hours",
53 "minutes": 60 * 4,
54 }
55 time_deltas["12h"] = {
56 "delta": datetime.timedelta(hours=12),
57 "label": "12 hours",
58 "minutes": 60 * 12,
59 }
60 time_deltas["24h"] = {
61 "delta": datetime.timedelta(hours=24),
62 "label": "24 hours",
63 "minutes": 60 * 24,
64 }
65 time_deltas["3d"] = {
66 "delta": datetime.timedelta(days=3),
67 "label": "3 days",
68 "minutes": 60 * 24 * 3,
69 }
70 time_deltas["1w"] = {
71 "delta": datetime.timedelta(days=7),
72 "label": "7 days",
73 "minutes": 60 * 24 * 7,
74 }
75 time_deltas["2w"] = {
76 "delta": datetime.timedelta(days=14),
77 "label": "14 days",
78 "minutes": 60 * 24 * 14,
79 }
80 time_deltas["1M"] = {
81 "delta": datetime.timedelta(days=31),
82 "label": "31 days",
83 "minutes": 60 * 24 * 31,
84 }
85 time_deltas["3M"] = {
86 "delta": datetime.timedelta(days=31 * 3),
87 "label": "3 months",
88 "minutes": 60 * 24 * 31 * 3,
89 }
90 time_deltas["6M"] = {
91 "delta": datetime.timedelta(days=31 * 6),
92 "label": "6 months",
93 "minutes": 60 * 24 * 31 * 6,
94 }
95 time_deltas["12M"] = {
96 "delta": datetime.timedelta(days=31 * 12),
97 "label": "12 months",
98 "minutes": 60 * 24 * 31 * 12,
99 }
29 time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
30 'label': '1 minute', 'minutes': 1}
31
32 time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
33 'label': '5 minutes', 'minutes': 5}
34 time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
35 'label': '30 minutes', 'minutes': 30}
36 time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
37 'label': '60 minutes', 'minutes': 60}
38 time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
39 'minutes': 60 * 4}
40 time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
41 'label': '12 hours', 'minutes': 60 * 12}
42 time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
43 'label': '24 hours', 'minutes': 60 * 24}
44 time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
45 'minutes': 60 * 24 * 3}
46 time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
47 'minutes': 60 * 24 * 7}
48 time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
49 'minutes': 60 * 24 * 14}
50 time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
51 'minutes': 60 * 24 * 31}
52 time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
53 'label': '3 months',
54 'minutes': 60 * 24 * 31 * 3}
55 time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
56 'label': '6 months',
57 'minutes': 60 * 24 * 31 * 6}
58 time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
59 'label': '12 months',
60 'minutes': 60 * 24 * 31 * 12}
100 61
101 62 # used in json representation
102 time_options = dict(
103 [
104 (k, {"label": v["label"], "minutes": v["minutes"]})
105 for k, v in time_deltas.items()
106 ]
107 )
108 FlashMsg = namedtuple("FlashMsg", ["msg", "level"])
63 time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
64 for k, v in time_deltas.items()])
65 FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])
109 66
110 67
111 68 def get_flash(request):
112 69 messages = []
113 70 messages.extend(
114 [FlashMsg(msg, "error") for msg in request.session.peek_flash("error")]
115 )
71 [FlashMsg(msg, 'error')
72 for msg in request.session.peek_flash('error')])
73 messages.extend([FlashMsg(msg, 'warning')
74 for msg in request.session.peek_flash('warning')])
116 75 messages.extend(
117 [FlashMsg(msg, "warning") for msg in request.session.peek_flash("warning")]
118 )
119 messages.extend([FlashMsg(msg, "notice") for msg in request.session.peek_flash()])
76 [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()])
120 77 return messages
121 78
122 79
123 80 def clear_flash(request):
124 request.session.pop_flash("error")
125 request.session.pop_flash("warning")
81 request.session.pop_flash('error')
82 request.session.pop_flash('warning')
126 83 request.session.pop_flash()
127 84
128 85
129 86 def get_type_formatted_flash(request):
130 return [
131 {"msg": message.msg, "type": message.level} for message in get_flash(request)
132 ]
87 return [{'msg': message.msg, 'type': message.level}
88 for message in get_flash(request)]
133 89
134 90
135 91 def gen_pagination_headers(request, paginator):
136 92 headers = {
137 "x-total-count": str(paginator.item_count),
138 "x-current-page": str(paginator.page),
139 "x-items-per-page": str(paginator.items_per_page),
93 'x-total-count': str(paginator.item_count),
94 'x-current-page': str(paginator.page),
95 'x-items-per-page': str(paginator.items_per_page)
140 96 }
141 97 params_dict = request.GET.dict_of_lists()
142 98 last_page_params = copy.deepcopy(params_dict)
143 last_page_params["page"] = paginator.last_page or 1
99 last_page_params['page'] = paginator.last_page or 1
144 100 first_page_params = copy.deepcopy(params_dict)
145 first_page_params.pop("page", None)
101 first_page_params.pop('page', None)
146 102 next_page_params = copy.deepcopy(params_dict)
147 next_page_params["page"] = paginator.next_page or paginator.last_page or 1
103 next_page_params['page'] = paginator.next_page or paginator.last_page or 1
148 104 prev_page_params = copy.deepcopy(params_dict)
149 prev_page_params["page"] = paginator.previous_page or 1
105 prev_page_params['page'] = paginator.previous_page or 1
150 106 lp_url = request.current_route_url(_query=last_page_params)
151 107 fp_url = request.current_route_url(_query=first_page_params)
152 links = ['rel="last", <{}>'.format(lp_url), 'rel="first", <{}>'.format(fp_url)]
108 links = [
109 'rel="last", <{}>'.format(lp_url),
110 'rel="first", <{}>'.format(fp_url),
111 ]
153 112 if first_page_params != prev_page_params:
154 113 prev_url = request.current_route_url(_query=prev_page_params)
155 114 links.append('rel="prev", <{}>'.format(prev_url))
156 115 if last_page_params != next_page_params:
157 116 next_url = request.current_route_url(_query=next_page_params)
158 117 links.append('rel="next", <{}>'.format(next_url))
159 headers["link"] = "; ".join(links)
118 headers['link'] = '; '.join(links)
160 119 return headers
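`gen_pagination_headers` is meant to be called from list views. A sketch of the wiring, assuming `paginator` is a webhelpers-style Page exposing `item_count`, `page`, and `items_per_page`:

    headers = gen_pagination_headers(request, paginator)
    request.response.headers.update(headers)
    # sets x-total-count, x-current-page, x-items-per-page plus a
    # combined "link" header with first/last relations (and
    # prev/next when they differ from the boundary pages)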
@@ -1,53 +1,46 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import re
18 18 from appenlight.lib.ext_json import json
19 19 from jinja2 import Markup, escape, evalcontextfilter
20 20
21 _paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")
21 _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
22 22
23 23
24 24 @evalcontextfilter
25 25 def nl2br(eval_ctx, value):
26 26 if eval_ctx.autoescape:
27 result = "\n\n".join(
28 "<p>%s</p>" % p.replace("\n", Markup("<br>\n"))
29 for p in _paragraph_re.split(escape(value))
30 )
27 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n'))
28 for p in _paragraph_re.split(escape(value)))
31 29 else:
32 result = "\n\n".join(
33 "<p>%s</p>" % p.replace("\n", "<br>\n")
34 for p in _paragraph_re.split(escape(value))
35 )
30 result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n')
31 for p in _paragraph_re.split(escape(value)))
36 32 if eval_ctx.autoescape:
37 33 result = Markup(result)
38 34 return result
39 35
40 36
41 37 @evalcontextfilter
42 38 def toJSONUnsafe(eval_ctx, value):
43 encoded = (
44 json.dumps(value)
45 .replace("&", "\\u0026")
46 .replace("<", "\\u003c")
47 .replace(">", "\\u003e")
48 .replace(">", "\\u003e")
49 .replace('"', "\\u0022")
50 .replace("'", "\\u0027")
51 .replace(r"\n", "/\\\n")
52 )
39 encoded = json.dumps(value).replace('&', '\\u0026') \
40 .replace('<', '\\u003c') \
41 .replace('>', '\\u003e') \
43 .replace('"', '\\u0022') \
44 .replace("'", '\\u0027') \
45 .replace(r'\n', '/\\\n')
53 46 return Markup("'%s'" % encoded)
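Both filters are eval-context filters, so they must be registered on the Jinja2 environment; a minimal sketch of that registration (the app's actual wiring is not part of this diff):

    from jinja2 import Environment

    env = Environment(autoescape=True)
    env.filters["nl2br"] = nl2br
    env.filters["toJSONUnsafe"] = toJSONUnsafe
    # template usage: {{ user_text|nl2br }}
    # <script>var ctx = {{ ctx_dict|toJSONUnsafe }};</script>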
@@ -1,83 +1,64 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import json
18 18 import logging
19 19
20 ignored_keys = [
21 "args",
22 "asctime",
23 "created",
24 "exc_info",
25 "exc_text",
26 "filename",
27 "funcName",
28 "levelname",
29 "levelno",
30 "lineno",
31 "message",
32 "module",
33 "msecs",
34 "msg",
35 "name",
36 "pathname",
37 "process",
38 "processName",
39 "relativeCreated",
40 "stack_info",
41 "thread",
42 "threadName",
43 ]
20 ignored_keys = ['args', 'asctime', 'created', 'exc_info', 'exc_text',
21 'filename', 'funcName', 'levelname', 'levelno', 'lineno',
22 'message', 'module', 'msecs', 'msg', 'name', 'pathname',
23 'process', 'processName', 'relativeCreated', 'stack_info',
24 'thread', 'threadName']
44 25
45 26
46 27 class JSONFormatter(logging.Formatter):
47 28 def format(self, record):
48 29 """
49 30 Format the specified record as text.
50 31
51 32 The record's attribute dictionary is used as the operand to a
52 33 string formatting operation which yields the returned string.
53 34 Before formatting the dictionary, a couple of preparatory steps
54 35 are carried out. The message attribute of the record is computed
55 36 using LogRecord.getMessage(). If the formatting string uses the
56 37 time (as determined by a call to usesTime(), formatTime() is
57 38 called to format the event time. If there is exception information,
58 39 it is formatted using formatException() and appended to the message.
59 40 """
60 41 record.message = record.getMessage()
61 42 log_dict = vars(record)
62 43 keys = [k for k in log_dict.keys() if k not in ignored_keys]
63 payload = {"message": record.message}
44 payload = {'message': record.message}
64 45 payload.update({k: log_dict[k] for k in keys})
65 46 record.message = json.dumps(payload, default=lambda x: str(x))
66 47
67 48 if self.usesTime():
68 49 record.asctime = self.formatTime(record, self.datefmt)
69 50 s = self.formatMessage(record)
70 51 if record.exc_info:
71 52 # Cache the traceback text to avoid converting it multiple times
72 53 # (it's constant anyway)
73 54 if not record.exc_text:
74 55 record.exc_text = self.formatException(record.exc_info)
75 56 if record.exc_text:
76 57 if s[-1:] != "\n":
77 58 s = s + "\n"
78 59 s = s + record.exc_text
79 60 if record.stack_info:
80 61 if s[-1:] != "\n":
81 62 s = s + "\n"
82 63 s = s + self.formatStack(record.stack_info)
83 64 return s
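Wiring the formatter into stdlib logging turns a record's extra attributes into JSON payload fields (values illustrative):

    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(JSONFormatter("%(asctime)s %(message)s"))
    log = logging.getLogger("appenlight")
    log.addHandler(handler)
    log.warning("rate limit hit", extra={"resource_id": 42})
    # -> 2017-01-01 12:00:00,000 {"message": "rate limit hit", "resource_id": 42}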
@@ -1,69 +1,65 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 BASE = "appenlight:data:{}"
17 BASE = 'appenlight:data:{}'
18 18
19 19 REDIS_KEYS = {
20 "tasks": {
21 "add_reports_lock": BASE.format("add_reports_lock:{}"),
22 "add_logs_lock": BASE.format("add_logs_lock:{}"),
20 'tasks': {
21 'add_reports_lock': BASE.format('add_reports_lock:{}'),
22 'add_logs_lock': BASE.format('add_logs_lock:{}'),
23 23 },
24 "counters": {
25 "events_per_minute_per_user": BASE.format("events_per_minute_per_user:{}:{}"),
26 "reports_per_minute": BASE.format("reports_per_minute:{}"),
27 "reports_per_hour_per_app": BASE.format("reports_per_hour_per_app:{}:{}"),
28 "reports_per_type": BASE.format("reports_per_type:{}"),
29 "logs_per_minute": BASE.format("logs_per_minute:{}"),
30 "logs_per_hour_per_app": BASE.format("logs_per_hour_per_app:{}:{}"),
31 "metrics_per_minute": BASE.format("metrics_per_minute:{}"),
32 "metrics_per_hour_per_app": BASE.format("metrics_per_hour_per_app:{}:{}"),
33 "report_group_occurences": BASE.format("report_group_occurences:{}"),
34 "report_group_occurences_alerting": BASE.format(
35 "report_group_occurences_alerting:{}"
36 ),
37 "report_group_occurences_10th": BASE.format("report_group_occurences_10th:{}"),
38 "report_group_occurences_100th": BASE.format(
39 "report_group_occurences_100th:{}"
40 ),
24 'counters': {
25 'events_per_minute_per_user': BASE.format(
26 'events_per_minute_per_user:{}:{}'),
27 'reports_per_minute': BASE.format('reports_per_minute:{}'),
28 'reports_per_hour_per_app': BASE.format(
29 'reports_per_hour_per_app:{}:{}'),
30 'reports_per_type': BASE.format('reports_per_type:{}'),
31 'logs_per_minute': BASE.format('logs_per_minute:{}'),
32 'logs_per_hour_per_app': BASE.format(
33 'logs_per_hour_per_app:{}:{}'),
34 'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
35 'metrics_per_hour_per_app': BASE.format(
36 'metrics_per_hour_per_app:{}:{}'),
37 'report_group_occurences': BASE.format('report_group_occurences:{}'),
38 'report_group_occurences_alerting': BASE.format(
39 'report_group_occurences_alerting:{}'),
40 'report_group_occurences_10th': BASE.format(
41 'report_group_occurences_10th:{}'),
42 'report_group_occurences_100th': BASE.format(
43 'report_group_occurences_100th:{}'),
41 44 },
42 "rate_limits": {
43 "per_application_reports_rate_limit": BASE.format(
44 "per_application_reports_limit:{}:{}"
45 ),
46 "per_application_logs_rate_limit": BASE.format(
47 "per_application_logs_rate_limit:{}:{}"
48 ),
49 "per_application_metrics_rate_limit": BASE.format(
50 "per_application_metrics_rate_limit:{}:{}"
51 ),
45 'rate_limits': {
46 'per_application_reports_rate_limit': BASE.format(
47 'per_application_reports_limit:{}:{}'),
48 'per_application_logs_rate_limit': BASE.format(
49 'per_application_logs_rate_limit:{}:{}'),
50 'per_application_metrics_rate_limit': BASE.format(
51 'per_application_metrics_rate_limit:{}:{}'),
52 52 },
53 "apps_that_got_new_data_per_hour": BASE.format(
54 "apps_that_got_new_data_per_hour:{}"
55 ),
56 "apps_that_had_reports": BASE.format("apps_that_had_reports"),
57 "apps_that_had_error_reports": BASE.format("apps_that_had_error_reports"),
58 "apps_that_had_reports_alerting": BASE.format("apps_that_had_reports_alerting"),
59 "apps_that_had_error_reports_alerting": BASE.format(
60 "apps_that_had_error_reports_alerting"
61 ),
62 "reports_to_notify_per_type_per_app": BASE.format(
63 "reports_to_notify_per_type_per_app:{}:{}"
64 ),
65 "reports_to_notify_per_type_per_app_alerting": BASE.format(
66 "reports_to_notify_per_type_per_app_alerting:{}:{}"
67 ),
68 "seen_tag_list": BASE.format("seen_tag_list"),
53 'apps_that_got_new_data_per_hour': BASE.format('apps_that_got_new_data_per_hour:{}'),
54 'apps_that_had_reports': BASE.format('apps_that_had_reports'),
55 'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
56 'apps_that_had_reports_alerting': BASE.format(
57 'apps_that_had_reports_alerting'),
58 'apps_that_had_error_reports_alerting': BASE.format(
59 'apps_that_had_error_reports_alerting'),
60 'reports_to_notify_per_type_per_app': BASE.format(
61 'reports_to_notify_per_type_per_app:{}:{}'),
62 'reports_to_notify_per_type_per_app_alerting': BASE.format(
63 'reports_to_notify_per_type_per_app_alerting:{}:{}'),
64 'seen_tag_list': BASE.format('seen_tag_list')
69 65 }
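
A minimal sketch of how these key templates are expanded and bumped at runtime, assuming BASE is a prefix template such as 'appenlight:data:{}' (the real prefix is defined earlier in this file and not shown in the diff) and a redis-py client; the minute value is illustrative.

import redis

BASE = 'appenlight:data:{}'  # assumed prefix; the actual one lives above this hunk
reports_per_minute = BASE.format('reports_per_minute:{}')

r = redis.StrictRedis()
key = reports_per_minute.format('2018-10-13T19:27')  # one bucket per minute
r.incr(key)          # count a report in this minute's bucket
r.expire(key, 3600)  # let stale buckets age out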
@@ -1,131 +1,135 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import json
18 18
19 19 from pyramid.security import unauthenticated_userid
20 20
21 21 import appenlight.lib.helpers as helpers
22 22
23 23 from authomatic.providers import oauth2, oauth1
24 24 from authomatic import Authomatic
25 from ziggurat_foundations.models.services.user import UserService
25 from appenlight.models.user import User
26 26
27 27
28 28 class CSRFException(Exception):
29 29 pass
30 30
31 31
32 32 class JSONException(Exception):
33 33 pass
34 34
35 35
36 36 def get_csrf_token(request):
37 37 return request.session.get_csrf_token()
38 38
39 39
40 40 def safe_json_body(request):
41 41 """
42 42 Returns None if json body is missing or erroneous
43 43 """
44 44 try:
45 45 return request.json_body
46 46 except ValueError:
47 47 return None
48 48
49 49
50 50 def unsafe_json_body(request):
51 51 """
52 52 Throws JSONException if the json body can't be deserialized
53 53 """
54 54 try:
55 55 return request.json_body
56 56 except ValueError:
57 raise JSONException("Incorrect JSON")
57 raise JSONException('Incorrect JSON')
58 58
59 59
60 60 def get_user(request):
61 if not request.path_info.startswith("/static"):
61 if not request.path_info.startswith('/static'):
62 62 user_id = unauthenticated_userid(request)
63 63 try:
64 64 user_id = int(user_id)
65 65 except Exception:
66 66 return None
67 67
68 68 if user_id:
69 user = UserService.by_id(user_id)
69 user = User.by_id(user_id)
70 70 if user:
71 request.environ["appenlight.username"] = "%d:%s" % (
72 user_id,
73 user.user_name,
74 )
71 request.environ['appenlight.username'] = '%d:%s' % (
72 user_id, user.user_name)
75 73 return user
76 74 else:
77 75 return None
78 76
79 77
80 78 def es_conn(request):
81 79 return request.registry.es_conn
82 80
83 81
84 82 def add_flash_to_headers(request, clear=True):
85 83 """
86 84 Adds pending flash messages to the response; if clear is true, clears out
87 85 the flash queue
88 86 """
89 87 flash_msgs = helpers.get_type_formatted_flash(request)
90 request.response.headers["x-flash-messages"] = json.dumps(flash_msgs)
88 request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
91 89 helpers.clear_flash(request)
92 90
93 91
94 92 def get_authomatic(request):
95 93 settings = request.registry.settings
96 94 # authomatic social auth
97 95 authomatic_conf = {
98 96 # callback http://yourapp.com/social_auth/twitter
99 "twitter": {
100 "class_": oauth1.Twitter,
101 "consumer_key": settings.get("authomatic.pr.twitter.key", ""),
102 "consumer_secret": settings.get("authomatic.pr.twitter.secret", ""),
97 'twitter': {
98 'class_': oauth1.Twitter,
99 'consumer_key': settings.get('authomatic.pr.twitter.key', ''),
100 'consumer_secret': settings.get('authomatic.pr.twitter.secret',
101 ''),
103 102 },
104 103 # callback http://yourapp.com/social_auth/facebook
105 "facebook": {
106 "class_": oauth2.Facebook,
107 "consumer_key": settings.get("authomatic.pr.facebook.app_id", ""),
108 "consumer_secret": settings.get("authomatic.pr.facebook.secret", ""),
109 "scope": ["email"],
104 'facebook': {
105 'class_': oauth2.Facebook,
106 'consumer_key': settings.get('authomatic.pr.facebook.app_id', ''),
107 'consumer_secret': settings.get('authomatic.pr.facebook.secret',
108 ''),
109 'scope': ['email'],
110 110 },
111 111 # callback http://yourapp.com/social_auth/google
112 "google": {
113 "class_": oauth2.Google,
114 "consumer_key": settings.get("authomatic.pr.google.key", ""),
115 "consumer_secret": settings.get("authomatic.pr.google.secret", ""),
116 "scope": ["profile", "email"],
112 'google': {
113 'class_': oauth2.Google,
114 'consumer_key': settings.get('authomatic.pr.google.key', ''),
115 'consumer_secret': settings.get(
116 'authomatic.pr.google.secret', ''),
117 'scope': ['profile', 'email'],
117 118 },
118 "github": {
119 "class_": oauth2.GitHub,
120 "consumer_key": settings.get("authomatic.pr.github.key", ""),
121 "consumer_secret": settings.get("authomatic.pr.github.secret", ""),
122 "scope": ["repo", "public_repo", "user:email"],
123 "access_headers": {"User-Agent": "AppEnlight"},
124 },
125 "bitbucket": {
126 "class_": oauth1.Bitbucket,
127 "consumer_key": settings.get("authomatic.pr.bitbucket.key", ""),
128 "consumer_secret": settings.get("authomatic.pr.bitbucket.secret", ""),
119 'github': {
120 'class_': oauth2.GitHub,
121 'consumer_key': settings.get('authomatic.pr.github.key', ''),
122 'consumer_secret': settings.get(
123 'authomatic.pr.github.secret', ''),
124 'scope': ['repo', 'public_repo', 'user:email'],
125 'access_headers': {'User-Agent': 'AppEnlight'},
129 126 },
127 'bitbucket': {
128 'class_': oauth1.Bitbucket,
129 'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''),
130 'consumer_secret': settings.get(
131 'authomatic.pr.bitbucket.secret', '')
132 }
130 133 }
131 return Authomatic(config=authomatic_conf, secret=settings["authomatic.secret"])
134 return Authomatic(
135 config=authomatic_conf, secret=settings['authomatic.secret'])
@@ -1,312 +1,298 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 import operator
19 19
20 20 log = logging.getLogger(__name__)
21 21
22 22
23 23 class RuleException(Exception):
24 24 pass
25 25
26 26
27 27 class KeyNotFoundException(RuleException):
28 28 pass
29 29
30 30
31 31 class UnknownTypeException(RuleException):
32 32 pass
33 33
34 34
35 35 class BadConfigException(RuleException):
36 36 pass
37 37
38 38
39 39 class InvalidValueException(RuleException):
40 40 pass
41 41
42 42
43 43 class RuleBase(object):
44 44 @classmethod
45 45 def default_dict_struct_getter(cls, struct, field_name):
46 46 """
47 47 returns a key from a dictionary based on field_name; if the name contains
48 48 `:` then it means additional nesting levels should be checked for the
49 49 key so `a:b:c` means return struct['a']['b']['c']
50 50
51 51 :param struct:
52 52 :param field_name:
53 53 :return:
54 54 """
55 parts = field_name.split(":") if field_name else []
55 parts = field_name.split(':') if field_name else []
56 56 found = struct
57 57 while parts:
58 58 current_key = parts.pop(0)
59 59 found = found.get(current_key)
60 60 if not found and parts:
61 raise KeyNotFoundException("Key not found in structure")
61 raise KeyNotFoundException('Key not found in structure')
62 62 return found
63 63
64 64 @classmethod
65 65 def default_obj_struct_getter(cls, struct, field_name):
66 66 """
67 67 returns a key from an instance based on field_name; if the name contains
68 68 `:` then it means additional nesting levels should be checked for the
69 69 key so `a:b:c` means return struct.a.b.c
70 70
71 71 :param struct:
72 72 :param field_name:
73 73 :return:
74 74 """
75 parts = field_name.split(":")
75 parts = field_name.split(':')
76 76 found = struct
77 77 while parts:
78 78 current_key = parts.pop(0)
79 79 found = getattr(found, current_key, None)
80 80 if not found and parts:
81 raise KeyNotFoundException("Key not found in structure")
81 raise KeyNotFoundException('Key not found in structure')
82 82 return found
83 83
84 84 def normalized_type(self, field, value):
85 85 """
86 86 Converts text values from self.conf_value based on type_matrix below
87 87 check_matrix defines what kind of checks we can perform on a field
88 88 value based on field name
89 89 """
90 90 f_type = self.type_matrix.get(field)
91 91 if f_type:
92 cast_to = f_type["type"]
92 cast_to = f_type['type']
93 93 else:
94 raise UnknownTypeException("Unknown type")
94 raise UnknownTypeException('Unknown type')
95 95
96 96 if value is None:
97 97 return None
98 98
99 99 try:
100 if cast_to == "int":
100 if cast_to == 'int':
101 101 return int(value)
102 elif cast_to == "float":
102 elif cast_to == 'float':
103 103 return float(value)
104 elif cast_to == "unicode":
104 elif cast_to == 'unicode':
105 105 return str(value)
106 106 except ValueError as exc:
107 107 raise InvalidValueException(exc)
108 108
109 109
110 110 class Rule(RuleBase):
111 def __init__(
112 self,
113 config,
114 type_matrix,
115 struct_getter=RuleBase.default_dict_struct_getter,
116 config_manipulator=None,
117 ):
111 def __init__(self, config, type_matrix,
112 struct_getter=RuleBase.default_dict_struct_getter,
113 config_manipulator=None):
118 114 """
119 115
120 116 :param config: dict - contains rule configuration
121 117 example::
122 118 {
123 119 "field": "__OR__",
124 120 "rules": [
125 121 {
126 122 "field": "__AND__",
127 123 "rules": [
128 124 {
129 125 "op": "ge",
130 126 "field": "occurences",
131 127 "value": "10"
132 128 },
133 129 {
134 130 "op": "ge",
135 131 "field": "priority",
136 132 "value": "4"
137 133 }
138 134 ]
139 135 },
140 136 {
141 137 "op": "eq",
142 138 "field": "http_status",
143 139 "value": "500"
144 140 }
145 141 ]
146 142 }
147 143 :param type_matrix: dict - contains map of type casts
148 144 example::
149 145 {
150 146 'http_status': 'int',
151 147 'priority': 'unicode',
152 148 }
153 149 :param struct_getter: callable - used to grab the value of field from
154 150 the structure passed to match() based
155 151 on key, default
156 152
157 153 """
158 154 self.type_matrix = type_matrix
159 155 self.config = config
160 156 self.struct_getter = struct_getter
161 157 self.config_manipulator = config_manipulator
162 158 if config_manipulator:
163 159 config_manipulator(self)
164 160
165 161 def subrule_check(self, rule_config, struct):
166 rule = Rule(
167 rule_config, self.type_matrix, config_manipulator=self.config_manipulator
168 )
162 rule = Rule(rule_config, self.type_matrix,
163 config_manipulator=self.config_manipulator)
169 164 return rule.match(struct)
170 165
171 166 def match(self, struct):
172 167 """
173 168 Check if rule matched for this specific report
174 169 First tries report value, then tests tags in not found, then finally
175 170 report group
176 171 """
177 field_name = self.config.get("field")
178 test_value = self.config.get("value")
172 field_name = self.config.get('field')
173 test_value = self.config.get('value')
179 174
180 175 if not field_name:
181 176 return False
182 177
183 if field_name == "__AND__":
184 rule = AND(
185 self.config["rules"],
186 self.type_matrix,
187 config_manipulator=self.config_manipulator,
188 )
178 if field_name == '__AND__':
179 rule = AND(self.config['rules'], self.type_matrix,
180 config_manipulator=self.config_manipulator)
189 181 return rule.match(struct)
190 elif field_name == "__OR__":
191 rule = OR(
192 self.config["rules"],
193 self.type_matrix,
194 config_manipulator=self.config_manipulator,
195 )
182 elif field_name == '__OR__':
183 rule = OR(self.config['rules'], self.type_matrix,
184 config_manipulator=self.config_manipulator)
196 185 return rule.match(struct)
197 elif field_name == "__NOT__":
198 rule = NOT(
199 self.config["rules"],
200 self.type_matrix,
201 config_manipulator=self.config_manipulator,
202 )
186 elif field_name == '__NOT__':
187 rule = NOT(self.config['rules'], self.type_matrix,
188 config_manipulator=self.config_manipulator)
203 189 return rule.match(struct)
204 190
205 191 if test_value is None:
206 192 return False
207 193
208 194 try:
209 struct_value = self.normalized_type(
210 field_name, self.struct_getter(struct, field_name)
211 )
195 struct_value = self.normalized_type(field_name,
196 self.struct_getter(struct,
197 field_name))
212 198 except (UnknownTypeException, InvalidValueException) as exc:
213 199 log.error(str(exc))
214 200 return False
215 201
216 202 try:
217 203 test_value = self.normalized_type(field_name, test_value)
218 204 except (UnknownTypeException, InvalidValueException) as exc:
219 205 log.error(str(exc))
220 206 return False
221 207
222 if self.config["op"] not in ("startswith", "endswith", "contains"):
208 if self.config['op'] not in ('startswith', 'endswith', 'contains'):
223 209 try:
224 return getattr(operator, self.config["op"])(struct_value, test_value)
210 return getattr(operator,
211 self.config['op'])(struct_value, test_value)
225 212 except TypeError:
226 213 return False
227 elif self.config["op"] == "startswith":
214 elif self.config['op'] == 'startswith':
228 215 return struct_value.startswith(test_value)
229 elif self.config["op"] == "endswith":
216 elif self.config['op'] == 'endswith':
230 217 return struct_value.endswith(test_value)
231 elif self.config["op"] == "contains":
218 elif self.config['op'] == 'contains':
232 219 return test_value in struct_value
233 raise BadConfigException(
234 "Invalid configuration, " "unknown operator: {}".format(self.config)
235 )
220 raise BadConfigException('Invalid configuration, '
221 'unknown operator: {}'.format(self.config))
236 222
237 223 def __repr__(self):
238 return "<Rule {} {}>".format(self.config.get("field"), self.config.get("value"))
224 return '<Rule {} {}>'.format(self.config.get('field'),
225 self.config.get('value'))
239 226
240 227
241 228 class AND(Rule):
242 229 def __init__(self, rules, *args, **kwargs):
243 230 super(AND, self).__init__({}, *args, **kwargs)
244 231 self.rules = rules
245 232
246 233 def match(self, struct):
247 return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])
234 return all([self.subrule_check(r_conf, struct) for r_conf
235 in self.rules])
248 236
249 237
250 238 class NOT(Rule):
251 239 def __init__(self, rules, *args, **kwargs):
252 240 super(NOT, self).__init__({}, *args, **kwargs)
253 241 self.rules = rules
254 242
255 243 def match(self, struct):
256 return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])
244 return all([not self.subrule_check(r_conf, struct) for r_conf
245 in self.rules])
257 246
258 247
259 248 class OR(Rule):
260 249 def __init__(self, rules, *args, **kwargs):
261 250 super(OR, self).__init__({}, *args, **kwargs)
262 251 self.rules = rules
263 252
264 253 def match(self, struct):
265 return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])
254 return any([self.subrule_check(r_conf, struct) for r_conf
255 in self.rules])
266 256
267 257
268 258 class RuleService(object):
269 259 @staticmethod
270 def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
260 def rule_from_config(config, field_mappings, labels_dict,
261 manipulator_func=None):
271 262 """
272 263 Returns modified rule with manipulator function
273 264 By default manipulator function replaces field id from labels_dict
274 265 with current field id proper for the rule from fields_mappings
275 266
276 267 because label X_X id might point to a different value on the next request
277 268 when new term is returned from elasticsearch - this ensures things
278 269 are kept 1:1 all the time
279 270 """
280 271 rev_map = {}
281 272 for k, v in labels_dict.items():
282 rev_map[(v["agg"], v["key"])] = k
273 rev_map[(v['agg'], v['key'],)] = k
283 274
284 275 if manipulator_func is None:
285
286 276 def label_rewriter_func(rule):
287 field = rule.config.get("field")
288 if not field or rule.config["field"] in [
289 "__OR__",
290 "__AND__",
291 "__NOT__",
292 ]:
277 field = rule.config.get('field')
278 if not field or rule.config['field'] in ['__OR__',
279 '__AND__', '__NOT__']:
293 280 return
294 281
295 to_map = field_mappings.get(rule.config["field"])
282 to_map = field_mappings.get(rule.config['field'])
296 283
297 284 # we need to replace series field with _AE_NOT_FOUND_ to not match
298 285 # accidentally some other field which happens to have the series that
299 286 # was used when the alert was created
300 287 if to_map:
301 to_replace = rev_map.get(
302 (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
303 )
288 to_replace = rev_map.get((to_map['agg'], to_map['key'],),
289 '_AE_NOT_FOUND_')
304 290 else:
305 to_replace = "_AE_NOT_FOUND_"
291 to_replace = '_AE_NOT_FOUND_'
306 292
307 rule.config["field"] = to_replace
308 rule.type_matrix[to_replace] = {"type": "float"}
293 rule.config['field'] = to_replace
294 rule.type_matrix[to_replace] = {"type": 'float'}
309 295
310 296 manipulator_func = label_rewriter_func
311 297
312 298 return Rule(config, {}, config_manipulator=manipulator_func)
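
A minimal sketch of driving the rule engine above, assuming the Rule class is importable (the module path below is a guess, not taken from the diff); the config mirrors the docstring example, and the type_matrix entries use the {'type': ...} shape that normalized_type() expects.

from appenlight.lib.rule import Rule  # assumed module path

config = {
    "field": "__OR__",
    "rules": [
        {"op": "ge", "field": "occurences", "value": "10"},
        {"op": "eq", "field": "http_status", "value": "500"},
    ],
}
type_matrix = {
    "occurences": {"type": "int"},   # spelling matches the codebase identifier
    "http_status": {"type": "int"},
}
rule = Rule(config, type_matrix)
# ge fails (3 < 10) but eq matches, so the OR rule as a whole matches
print(rule.match({"occurences": 3, "http_status": 500}))  # True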
@@ -1,62 +1,60 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 from ziggurat_foundations.models.services.external_identity import (
18 ExternalIdentityService,
19 )
17 from ziggurat_foundations.models.services.external_identity import \
18 ExternalIdentityService
20 19 from appenlight.models.external_identity import ExternalIdentity
21 20
22 21
23 22 def handle_social_data(request, user, social_data):
24 23 social_data = social_data
25 24 update_identity = False
26 25
27 26 extng_id = ExternalIdentityService.by_external_id_and_provider(
28 social_data["user"]["id"], social_data["credentials"].provider_name
27 social_data['user']['id'],
28 social_data['credentials'].provider_name
29 29 )
30 30
31 31 # fix legacy accounts with wrong google ID
32 if not extng_id and social_data["credentials"].provider_name == "google":
32 if not extng_id and social_data['credentials'].provider_name == 'google':
33 33 extng_id = ExternalIdentityService.by_external_id_and_provider(
34 social_data["user"]["email"], social_data["credentials"].provider_name
34 social_data['user']['email'],
35 social_data['credentials'].provider_name
35 36 )
36 37
37 38 if extng_id:
38 39 extng_id.delete()
39 40 update_identity = True
40 41
41 if not social_data["user"]["id"]:
42 if not social_data['user']['id']:
42 43 request.session.flash(
43 "No external user id found? Perhaps permissions for "
44 "authentication are set incorrectly",
45 "error",
46 )
44 'No external user id found? Perhaps permissions for '
45 'authentication are set incorrectly', 'error')
47 46 return False
48 47
49 48 if not extng_id or update_identity:
50 49 if not update_identity:
51 request.session.flash(
52 "Your external identity is now " "connected with your account"
53 )
50 request.session.flash('Your external identity is now '
51 'connected with your account')
54 52 ex_identity = ExternalIdentity()
55 ex_identity.external_id = social_data["user"]["id"]
56 ex_identity.external_user_name = social_data["user"]["user_name"]
57 ex_identity.provider_name = social_data["credentials"].provider_name
58 ex_identity.access_token = social_data["credentials"].token
59 ex_identity.token_secret = social_data["credentials"].token_secret
60 ex_identity.alt_token = social_data["credentials"].refresh_token
53 ex_identity.external_id = social_data['user']['id']
54 ex_identity.external_user_name = social_data['user']['user_name']
55 ex_identity.provider_name = social_data['credentials'].provider_name
56 ex_identity.access_token = social_data['credentials'].token
57 ex_identity.token_secret = social_data['credentials'].token_secret
58 ex_identity.alt_token = social_data['credentials'].refresh_token
61 59 user.external_identities.append(ex_identity)
62 request.session.pop("zigg.social_auth", None)
60 request.session.pop('zigg.social_auth', None)
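
For reference, the shape handle_social_data() expects, reconstructed from the lookups above; 'credentials' is really an authomatic credentials object, faked here with a namedtuple purely for illustration.

from collections import namedtuple

Credentials = namedtuple(
    'Credentials', 'provider_name token token_secret refresh_token')
social_data = {
    'user': {'id': '12345', 'user_name': 'some-login',
             'email': 'user@example.com'},
    'credentials': Credentials('github', 'token', 'secret', 'refresh'),
}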
@@ -1,560 +1,490 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 """
18 18 Utility functions.
19 19 """
20 20 import logging
21 21 import requests
22 22 import hashlib
23 23 import json
24 24 import copy
25 25 import uuid
26 26 import appenlight.lib.helpers as h
27 27 from collections import namedtuple
28 28 from datetime import timedelta, datetime, date
29 29 from dogpile.cache.api import NO_VALUE
30 30 from appenlight.models import Datastores
31 from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
31 from appenlight.validators import (LogSearchSchema,
32 TagListSchema,
33 accepted_search_params)
32 34 from itsdangerous import TimestampSigner
33 35 from ziggurat_foundations.permissions import ALL_PERMISSIONS
34 from ziggurat_foundations.models.services.user import UserService
35 36 from dateutil.relativedelta import relativedelta
36 37 from dateutil.rrule import rrule, MONTHLY, DAILY
37 38
38 39 log = logging.getLogger(__name__)
39 40
40 41
41 Stat = namedtuple("Stat", "start_interval value")
42 Stat = namedtuple('Stat', 'start_interval value')
42 43
43 44
44 45 def default_extractor(item):
45 46 """
46 47 :param item - item to extract date from
47 48 """
48 if hasattr(item, "start_interval"):
49 if hasattr(item, 'start_interval'):
49 50 return item.start_interval
50 return item["start_interval"]
51 return item['start_interval']
51 52
52 53
53 54 # fast gap generator
54 def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
55 def gap_gen_default(start, step, itemiterator, end_time=None,
56 iv_extractor=None):
55 57 """ generates a list of time/value items based on step and itemiterator
56 58 if entries are missing from the iterator, time/None pairs are returned
57 59 instead
58 60 :param start - datetime - what time should we start generating our values
59 61 :param step - timedelta - stepsize
60 62 :param itemiterator - iterable - we will check this iterable for values
61 63 corresponding to generated steps
62 64 :param end_time - datetime - when last step is >= end_time stop iterating
63 65 :param iv_extractor - extracts current step from iterable items
64 66 """
65 67
66 68 if not iv_extractor:
67 69 iv_extractor = default_extractor
68 70
69 71 next_step = start
70 72 minutes = step.total_seconds() / 60.0
71 73 while next_step.minute % minutes != 0:
72 74 next_step = next_step.replace(minute=next_step.minute - 1)
73 75 for item in itemiterator:
74 76 item_start_interval = iv_extractor(item)
75 77 # do we have a match for current time step in our data?
76 78 # no gen a new tuple with 0 values
77 79 while next_step < item_start_interval:
78 80 yield Stat(next_step, None)
79 81 next_step = next_step + step
80 82 if next_step == item_start_interval:
81 83 yield Stat(item_start_interval, item)
82 84 next_step = next_step + step
83 85 if end_time:
84 86 while next_step < end_time:
85 87 yield Stat(next_step, None)
86 88 next_step = next_step + step
87 89
88 90
89 91 class DateTimeEncoder(json.JSONEncoder):
90 92 """ Simple datetime to ISO encoder for json serialization"""
91 93
92 94 def default(self, obj):
93 95 if isinstance(obj, date):
94 96 return obj.isoformat()
95 97 if isinstance(obj, datetime):
96 98 return obj.isoformat()
97 99 return json.JSONEncoder.default(self, obj)
98 100
99 101
100 def channelstream_request(
101 secret, endpoint, payload, throw_exceptions=False, servers=None
102 ):
102 def channelstream_request(secret, endpoint, payload, throw_exceptions=False,
103 servers=None):
103 104 responses = []
104 105 if not servers:
105 106 servers = []
106 107
107 108 signer = TimestampSigner(secret)
108 109 sig_for_server = signer.sign(endpoint)
109 for secret, server in [(s["secret"], s["server"]) for s in servers]:
110 for secret, server in [(s['secret'], s['server']) for s in servers]:
110 111 response = {}
111 secret_headers = {
112 "x-channelstream-secret": sig_for_server,
113 "x-channelstream-endpoint": endpoint,
114 "Content-Type": "application/json",
115 }
116 url = "%s%s" % (server, endpoint)
112 secret_headers = {'x-channelstream-secret': sig_for_server,
113 'x-channelstream-endpoint': endpoint,
114 'Content-Type': 'application/json'}
115 url = '%s%s' % (server, endpoint)
117 116 try:
118 response = requests.post(
119 url,
120 data=json.dumps(payload, cls=DateTimeEncoder),
121 headers=secret_headers,
122 verify=False,
123 timeout=2,
124 ).json()
117 response = requests.post(url,
118 data=json.dumps(payload,
119 cls=DateTimeEncoder),
120 headers=secret_headers,
121 verify=False,
122 timeout=2).json()
125 123 except requests.exceptions.RequestException as e:
126 124 if throw_exceptions:
127 125 raise
128 126 responses.append(response)
129 127 return responses
130 128
131 129
132 130 def add_cors_headers(response):
133 131 # allow CORS
134 response.headers.add("Access-Control-Allow-Origin", "*")
135 response.headers.add("XDomainRequestAllowed", "1")
136 response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
132 response.headers.add('Access-Control-Allow-Origin', '*')
133 response.headers.add('XDomainRequestAllowed', '1')
134 response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
137 135 # response.headers.add('Access-Control-Allow-Credentials', 'true')
138 response.headers.add(
139 "Access-Control-Allow-Headers",
140 "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie",
141 )
142 response.headers.add("Access-Control-Max-Age", "86400")
136 response.headers.add('Access-Control-Allow-Headers',
137 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie')
138 response.headers.add('Access-Control-Max-Age', '86400')
143 139
144 140
145 141 from sqlalchemy.sql import compiler
146 142 from psycopg2.extensions import adapt as sqlescape
147 143
148 144
149 145 # or use the appropriate escape function from your db driver
150 146
151
152 147 def compile_query(query):
153 148 dialect = query.session.bind.dialect
154 149 statement = query.statement
155 150 comp = compiler.SQLCompiler(dialect, statement)
156 151 comp.compile()
157 152 enc = dialect.encoding
158 153 params = {}
159 154 for k, v in comp.params.items():
160 155 if isinstance(v, str):
161 156 v = v.encode(enc)
162 157 params[k] = sqlescape(v)
163 158 return (comp.string.encode(enc) % params).decode(enc)
164 159
165 160
166 161 def convert_es_type(input_data):
167 162 """
168 163 This might need to convert some text or other types to corresponding ES types
169 164 """
170 165 return str(input_data)
171 166
172 167
173 ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])
168 ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])
174 169
175 170
176 171 def parse_proto(input_data):
177 172 try:
178 parts = [int(x) for x in input_data.split(".")]
173 parts = [int(x) for x in input_data.split('.')]
179 174 while len(parts) < 3:
180 175 parts.append(0)
181 176 return ProtoVersion(*parts)
182 177 except Exception as e:
183 log.info("Unknown protocol version: %s" % e)
178 log.info('Unknown protocol version: %s' % e)
184 179 return ProtoVersion(99, 99, 99)
185 180
186 181
187 def es_index_name_limiter(
188 start_date=None, end_date=None, months_in_past=6, ixtypes=None
189 ):
182 def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
183 ixtypes=None):
190 184 """
191 185 This function limits the search to 6 months by default so we don't have to
192 186 query 300 elasticsearch indices for 20 years of historical data for example
193 187 """
194 188
195 189 # should be cached later
196 190 def get_possible_names():
197 return list(Datastores.es.indices.get_alias("*"))
191 return list(Datastores.es.aliases().keys())
198 192
199 193 possible_names = get_possible_names()
200 194 es_index_types = []
201 195 if not ixtypes:
202 ixtypes = ["reports", "metrics", "logs"]
196 ixtypes = ['reports', 'metrics', 'logs']
203 197 for t in ixtypes:
204 if t == "reports":
205 es_index_types.append("rcae_r_%s")
206 elif t == "logs":
207 es_index_types.append("rcae_l_%s")
208 elif t == "metrics":
209 es_index_types.append("rcae_m_%s")
210 elif t == "uptime":
211 es_index_types.append("rcae_uptime_ce_%s")
212 elif t == "slow_calls":
213 es_index_types.append("rcae_sc_%s")
198 if t == 'reports':
199 es_index_types.append('rcae_r_%s')
200 elif t == 'logs':
201 es_index_types.append('rcae_l_%s')
202 elif t == 'metrics':
203 es_index_types.append('rcae_m_%s')
204 elif t == 'uptime':
205 es_index_types.append('rcae_u_%s')
206 elif t == 'slow_calls':
207 es_index_types.append('rcae_sc_%s')
214 208
215 209 if start_date:
216 210 start_date = copy.copy(start_date)
217 211 else:
218 212 if not end_date:
219 213 end_date = datetime.utcnow()
220 214 start_date = end_date + relativedelta(months=months_in_past * -1)
221 215
222 216 if not end_date:
223 217 end_date = start_date + relativedelta(months=months_in_past)
224 218
225 index_dates = list(
226 rrule(
227 MONTHLY,
228 dtstart=start_date.date().replace(day=1),
229 until=end_date.date(),
230 count=36,
231 )
232 )
219 index_dates = list(rrule(MONTHLY,
220 dtstart=start_date.date().replace(day=1),
221 until=end_date.date(),
222 count=36))
233 223 index_names = []
234 224 for ix_type in es_index_types:
235 to_extend = [
236 ix_type % d.strftime("%Y_%m")
237 for d in index_dates
238 if ix_type % d.strftime("%Y_%m") in possible_names
239 ]
225 to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
226 if ix_type % d.strftime('%Y_%m') in possible_names]
240 227 index_names.extend(to_extend)
241 for day in list(
242 rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
243 ):
244 ix_name = ix_type % day.strftime("%Y_%m_%d")
228 for day in list(rrule(DAILY, dtstart=start_date.date(),
229 until=end_date.date(), count=366)):
230 ix_name = ix_type % day.strftime('%Y_%m_%d')
245 231 if ix_name in possible_names:
246 232 index_names.append(ix_name)
247 233 return index_names
248 234
249 235
250 236 def build_filter_settings_from_query_dict(
251 request, params=None, override_app_ids=None, resource_permissions=None
252 ):
237 request, params=None, override_app_ids=None,
238 resource_permissions=None):
253 239 """
254 240 Builds list of normalized search terms for ES from query params
255 241 ensuring application list is restricted to only applications user
256 242 has access to
257 243
258 244 :param params (dictionary)
259 245 :param override_app_ids - list of application ids to use instead of
260 246 applications user normally has access to
261 247 """
262 248 params = copy.deepcopy(params)
263 249 applications = []
264 250 if not resource_permissions:
265 resource_permissions = ["view"]
251 resource_permissions = ['view']
266 252
267 253 if request.user:
268 applications = UserService.resources_with_perms(
269 request.user, resource_permissions, resource_types=["application"]
270 )
254 applications = request.user.resources_with_perms(
255 resource_permissions, resource_types=['application'])
271 256
272 257 # CRITICAL - this ensures our resultset is limited to only the ones
273 258 # user has view permissions
274 259 all_possible_app_ids = set([app.resource_id for app in applications])
275 260
276 261 # if override is preset we force permission for app to be present
277 262 # this allows users to see dashboards and applications they would
278 263 # normally not be able to
279 264
280 265 if override_app_ids:
281 266 all_possible_app_ids = set(override_app_ids)
282 267
283 268 schema = LogSearchSchema().bind(resources=all_possible_app_ids)
284 269 tag_schema = TagListSchema()
285 270 filter_settings = schema.deserialize(params)
286 271 tag_list = []
287 272 for k, v in list(filter_settings.items()):
288 273 if k in accepted_search_params:
289 274 continue
290 tag_list.append({"name": k, "value": v, "op": "eq"})
275 tag_list.append({"name": k, "value": v, "op": 'eq'})
291 276 # remove the key from filter_settings
292 277 filter_settings.pop(k, None)
293 278 tags = tag_schema.deserialize(tag_list)
294 filter_settings["tags"] = tags
279 filter_settings['tags'] = tags
295 280 return filter_settings
296 281
297 282
298 283 def gen_uuid():
299 284 return str(uuid.uuid4())
300 285
301 286
302 287 def gen_uuid4_sha_hex():
303 288 return hashlib.sha1(uuid.uuid4().bytes).hexdigest()
304 289
305 290
306 291 def permission_tuple_to_dict(data):
307 292 out = {
308 293 "user_name": None,
309 294 "perm_name": data.perm_name,
310 295 "owner": data.owner,
311 296 "type": data.type,
312 297 "resource_name": None,
313 298 "resource_type": None,
314 299 "resource_id": None,
315 300 "group_name": None,
316 "group_id": None,
301 "group_id": None
317 302 }
318 303 if data.user:
319 304 out["user_name"] = data.user.user_name
320 305 if data.perm_name == ALL_PERMISSIONS:
321 out["perm_name"] = "__all_permissions__"
306 out['perm_name'] = '__all_permissions__'
322 307 if data.resource:
323 out["resource_name"] = data.resource.resource_name
324 out["resource_type"] = data.resource.resource_type
325 out["resource_id"] = data.resource.resource_id
308 out['resource_name'] = data.resource.resource_name
309 out['resource_type'] = data.resource.resource_type
310 out['resource_id'] = data.resource.resource_id
326 311 if data.group:
327 out["group_name"] = data.group.group_name
328 out["group_id"] = data.group.id
312 out['group_name'] = data.group.group_name
313 out['group_id'] = data.group.id
329 314 return out
330 315
331 316
332 def get_cached_buckets(
333 request,
334 stats_since,
335 end_time,
336 fn,
337 cache_key,
338 gap_gen=None,
339 db_session=None,
340 step_interval=None,
341 iv_extractor=None,
342 rerange=False,
343 *args,
344 **kwargs
345 ):
317 def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
318 gap_gen=None, db_session=None, step_interval=None,
319 iv_extractor=None,
320 rerange=False, *args, **kwargs):
346 321 """ Takes "fn" that should return some data and tries to load the data
347 322 dividing it into daily buckets - if the stats_since and end time give a
348 323 delta bigger than 24 hours, then only "today's" data is computed on the fly
349 324
350 325 :param request: (request) request object
351 326 :param stats_since: (datetime) start date of buckets range
352 327 :param end_time: (datetime) end date of buckets range - utcnow() if None
353 328 :param fn: (callable) callable to use to populate buckets should have
354 329 following signature:
355 330 def get_data(request, since_when, until, *args, **kwargs):
356 331
357 332 :param cache_key: (string) cache key that will be used to build bucket
358 333 caches
359 334 :param gap_gen: (callable) gap generator - should return step intervals
360 335 to use with out `fn` callable
361 336 :param db_session: (Session) sqlalchemy session
362 337 :param step_interval: (timedelta) optional step interval if we want to
363 338 override the default determined from total start/end time delta
364 339 :param iv_extractor: (callable) used to get step intervals from data
365 340 returned by `fn` callable
366 341 :param rerange: (bool) handy if we want to change ranges from hours to
367 342 days when cached data is missing - will shorten execution time if `fn`
368 343 callable supports that and we are working with multiple rows - like metrics
369 344 :param args:
370 345 :param kwargs:
371 346
372 347 :return: iterable
373 348 """
374 349 if not end_time:
375 350 end_time = datetime.utcnow().replace(second=0, microsecond=0)
376 351 delta = end_time - stats_since
377 352 # if smaller than 3 days we want to group by 5min else by 1h,
378 353 # for 60 min group by min
379 354 if not gap_gen:
380 355 gap_gen = gap_gen_default
381 356 if not iv_extractor:
382 357 iv_extractor = default_extractor
383 358
384 359 # do not use custom interval if total time range with new iv would exceed
385 360 # end time
386 361 if not step_interval or stats_since + step_interval >= end_time:
387 if delta < h.time_deltas.get("12h")["delta"]:
362 if delta < h.time_deltas.get('12h')['delta']:
388 363 step_interval = timedelta(seconds=60)
389 elif delta < h.time_deltas.get("3d")["delta"]:
364 elif delta < h.time_deltas.get('3d')['delta']:
390 365 step_interval = timedelta(seconds=60 * 5)
391 elif delta > h.time_deltas.get("2w")["delta"]:
366 elif delta > h.time_deltas.get('2w')['delta']:
392 367 step_interval = timedelta(days=1)
393 368 else:
394 369 step_interval = timedelta(minutes=60)
395 370
396 371 if step_interval >= timedelta(minutes=60):
397 log.info(
398 "cached_buckets:{}: adjusting start time "
399 "for hourly or daily intervals".format(cache_key)
400 )
372 log.info('cached_buckets:{}: adjusting start time '
373 'for hourly or daily intervals'.format(cache_key))
401 374 stats_since = stats_since.replace(hour=0, minute=0)
402 375
403 ranges = [
404 i.start_interval
405 for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
406 ]
376 ranges = [i.start_interval for i in list(gap_gen(stats_since,
377 step_interval, [],
378 end_time=end_time))]
407 379 buckets = {}
408 storage_key = "buckets:" + cache_key + "{}|{}"
380 storage_key = 'buckets:' + cache_key + '{}|{}'
409 381 # this means we basically cache per hour in 3-14 day intervals, but I think
410 382 # it's fine at this point - will be faster than db access anyway
411 383
412 384 if len(ranges) >= 1:
413 385 last_ranges = [ranges[-1]]
414 386 else:
415 387 last_ranges = []
416 388 if step_interval >= timedelta(minutes=60):
417 389 for r in ranges:
418 390 k = storage_key.format(step_interval.total_seconds(), r)
419 391 value = request.registry.cache_regions.redis_day_30.get(k)
420 392 # last buckets are never loaded from cache
421 is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
393 is_last_result = (
394 r >= end_time - timedelta(hours=6) or r in last_ranges)
422 395 if value is not NO_VALUE and not is_last_result:
423 log.info(
424 "cached_buckets:{}: "
425 "loading range {} from cache".format(cache_key, r)
426 )
396 log.info("cached_buckets:{}: "
397 "loading range {} from cache".format(cache_key, r))
427 398 buckets[r] = value
428 399 else:
429 log.info(
430 "cached_buckets:{}: "
431 "loading range {} from storage".format(cache_key, r)
432 )
400 log.info("cached_buckets:{}: "
401 "loading range {} from storage".format(cache_key, r))
433 402 range_size = step_interval
434 if (
435 step_interval == timedelta(minutes=60)
436 and not is_last_result
437 and rerange
438 ):
403 if (step_interval == timedelta(minutes=60) and
404 not is_last_result and rerange):
439 405 range_size = timedelta(days=1)
440 406 r = r.replace(hour=0, minute=0)
441 log.info(
442 "cached_buckets:{}: "
443 "loading collapsed "
444 "range {} {}".format(cache_key, r, r + range_size)
445 )
407 log.info("cached_buckets:{}: "
408 "loading collapsed "
409 "range {} {}".format(cache_key, r,
410 r + range_size))
446 411 bucket_data = fn(
447 request,
448 r,
449 r + range_size,
450 step_interval,
451 gap_gen,
452 bucket_count=len(ranges),
453 *args,
454 **kwargs
455 )
412 request, r, r + range_size, step_interval,
413 gap_gen, bucket_count=len(ranges), *args, **kwargs)
456 414 for b in bucket_data:
457 415 b_iv = iv_extractor(b)
458 416 buckets[b_iv] = b
459 k2 = storage_key.format(step_interval.total_seconds(), b_iv)
417 k2 = storage_key.format(
418 step_interval.total_seconds(), b_iv)
460 419 request.registry.cache_regions.redis_day_30.set(k2, b)
461 420 log.info("cached_buckets:{}: saving cache".format(cache_key))
462 421 else:
463 422 # bucket count is 1 for short time ranges <= 24h from now
464 bucket_data = fn(
465 request,
466 stats_since,
467 end_time,
468 step_interval,
469 gap_gen,
470 bucket_count=1,
471 *args,
472 **kwargs
473 )
423 bucket_data = fn(request, stats_since, end_time, step_interval,
424 gap_gen, bucket_count=1, *args, **kwargs)
474 425 for b in bucket_data:
475 426 buckets[iv_extractor(b)] = b
476 427 return buckets
477 428
478 429
479 def get_cached_split_data(
480 request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
481 ):
430 def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
431 db_session=None, *args, **kwargs):
482 432 """ Takes "fn" that should return some data and tries to load the data
483 433 dividing it into 2 buckets - cached "since_from" bucket and "today"
484 434 bucket - then the data can be reduced into a single value
485 435
486 436 Data is cached if the stats_since and end time give a delta bigger
487 437 than 24 hours - then only 24h is computed on the fly
488 438 """
489 439 if not end_time:
490 440 end_time = datetime.utcnow().replace(second=0, microsecond=0)
491 441 delta = end_time - stats_since
492 442
493 443 if delta >= timedelta(minutes=60):
494 log.info(
495 "cached_split_data:{}: adjusting start time "
496 "for hourly or daily intervals".format(cache_key)
497 )
444 log.info('cached_split_data:{}: adjusting start time '
445 'for hourly or daily intervals'.format(cache_key))
498 446 stats_since = stats_since.replace(hour=0, minute=0)
499 447
500 storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
448 storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
501 449 old_end_time = end_time.replace(hour=0, minute=0)
502 450
503 final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
451 final_storage_key = storage_key.format(delta.total_seconds(),
452 old_end_time)
504 453 older_data = None
505 454
506 cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)
455 cdata = request.registry.cache_regions.redis_day_7.get(
456 final_storage_key)
507 457
508 458 if cdata:
509 log.info("cached_split_data:{}: found old " "bucket data".format(cache_key))
459 log.info("cached_split_data:{}: found old "
460 "bucket data".format(cache_key))
510 461 older_data = cdata
511 462
512 if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
513 log.info(
514 "cached_split_data:{}: didn't find the "
515 "start bucket in cache so load older data".format(cache_key)
516 )
463 if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
464 not cdata):
465 log.info("cached_split_data:{}: didn't find the "
466 "start bucket in cache so load older data".format(cache_key))
517 467 recent_stats_since = old_end_time
518 older_data = fn(
519 request,
520 stats_since,
521 recent_stats_since,
522 db_session=db_session,
523 *args,
524 **kwargs
525 )
526 request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
527 elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
468 older_data = fn(request, stats_since, recent_stats_since,
469 db_session=db_session, *args, **kwargs)
470 request.registry.cache_regions.redis_day_7.set(final_storage_key,
471 older_data)
472 elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
528 473 recent_stats_since = old_end_time
529 474 else:
530 475 recent_stats_since = stats_since
531 476
532 log.info(
533 "cached_split_data:{}: loading fresh "
534 "data bucksts from last 24h ".format(cache_key)
535 )
536 todays_data = fn(
537 request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
538 )
477 log.info("cached_split_data:{}: loading fresh "
478 "data bucksts from last 24h ".format(cache_key))
479 todays_data = fn(request, recent_stats_since, end_time,
480 db_session=db_session, *args, **kwargs)
539 481 return older_data, todays_data
540 482
541 483
542 484 def in_batches(seq, size):
543 485 """
543 485 Splits an iterable into batches of specified size
545 487 :param seq (iterable)
546 488 :param size integer
547 489 """
548 return (seq[pos : pos + size] for pos in range(0, len(seq), size))
549
550
551 def get_es_info(cache_regions, es_conn):
552 @cache_regions.memory_min_10.cache_on_arguments()
553 def get_es_info_cached():
554 returned_info = {"raw_info": es_conn.info()}
555 returned_info["version"] = returned_info["raw_info"]["version"]["number"].split(
556 "."
557 )
558 return returned_info
559
560 return get_es_info_cached()
490 return (seq[pos:pos + size] for pos in range(0, len(seq), size))
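
A quick sketch of gap_gen_default() from this file filling holes in a sparse time series; items only need a 'start_interval' key for the default extractor (the import path is assumed, not shown in the diff).

from datetime import datetime, timedelta
from appenlight.lib.utils import gap_gen_default  # assumed module path

start = datetime(2018, 10, 13, 12, 0)
step = timedelta(minutes=1)
items = [{'start_interval': datetime(2018, 10, 13, 12, 2), 'value': 7}]
for stat in gap_gen_default(start, step, items,
                            end_time=datetime(2018, 10, 13, 12, 4)):
    print(stat.start_interval.time(), stat.value)
# 12:00 and 12:01 yield None, 12:02 yields the item, 12:03 yields None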
@@ -1,161 +1,142 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 import uuid
19 19
20 20 from datetime import datetime
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 25 def parse_airbrake_xml(request):
26 26 root = request.context.airbrake_xml_etree
27 error = root.find("error")
28 notifier = root.find("notifier")
29 server_env = root.find("server-environment")
30 request_data = root.find("request")
31 user = root.find("current-user")
27 error = root.find('error')
28 notifier = root.find('notifier')
29 server_env = root.find('server-environment')
30 request_data = root.find('request')
31 user = root.find('current-user')
32 32 if request_data is not None:
33 cgi_data = request_data.find("cgi-data")
33 cgi_data = request_data.find('cgi-data')
34 34 if cgi_data is None:
35 35 cgi_data = []
36 36
37 37 error_dict = {
38 "class_name": error.findtext("class") or "",
39 "error": error.findtext("message") or "",
38 'class_name': error.findtext('class') or '',
39 'error': error.findtext('message') or '',
40 40 "occurences": 1,
41 41 "http_status": 500,
42 42 "priority": 5,
43 "server": "unknown",
44 "url": "unknown",
45 "request": {},
43 "server": 'unknown',
44 'url': 'unknown', 'request': {}
46 45 }
47 46 if user is not None:
48 error_dict["username"] = user.findtext("username") or user.findtext("id")
47 error_dict['username'] = user.findtext('username') or \
48 user.findtext('id')
49 49 if notifier is not None:
50 error_dict["client"] = notifier.findtext("name")
50 error_dict['client'] = notifier.findtext('name')
51 51
52 52 if server_env is not None:
53 error_dict["server"] = server_env.findtext("hostname", "unknown")
53 error_dict["server"] = server_env.findtext('hostname', 'unknown')
54 54
55 whitelist_environ = [
56 "REMOTE_USER",
57 "REMOTE_ADDR",
58 "SERVER_NAME",
59 "CONTENT_TYPE",
60 "HTTP_REFERER",
61 ]
55 whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
56 'CONTENT_TYPE', 'HTTP_REFERER']
62 57
63 58 if request_data is not None:
64 error_dict["url"] = request_data.findtext("url", "unknown")
65 component = request_data.findtext("component")
66 action = request_data.findtext("action")
59 error_dict['url'] = request_data.findtext('url', 'unknown')
60 component = request_data.findtext('component')
61 action = request_data.findtext('action')
67 62 if component and action:
68 error_dict["view_name"] = "%s:%s" % (component, action)
63 error_dict['view_name'] = '%s:%s' % (component, action)
69 64 for node in cgi_data:
70 key = node.get("key")
71 if key.startswith("HTTP") or key in whitelist_environ:
72 error_dict["request"][key] = node.text
73 elif "query_parameters" in key:
74 error_dict["request"]["GET"] = {}
65 key = node.get('key')
66 if key.startswith('HTTP') or key in whitelist_environ:
67 error_dict['request'][key] = node.text
68 elif 'query_parameters' in key:
69 error_dict['request']['GET'] = {}
75 70 for x in node:
76 error_dict["request"]["GET"][x.get("key")] = x.text
77 elif "request_parameters" in key:
78 error_dict["request"]["POST"] = {}
71 error_dict['request']['GET'][x.get('key')] = x.text
72 elif 'request_parameters' in key:
73 error_dict['request']['POST'] = {}
79 74 for x in node:
80 error_dict["request"]["POST"][x.get("key")] = x.text
81 elif key.endswith("cookie"):
82 error_dict["request"]["COOKIE"] = {}
75 error_dict['request']['POST'][x.get('key')] = x.text
76 elif key.endswith('cookie'):
77 error_dict['request']['COOKIE'] = {}
83 78 for x in node:
84 error_dict["request"]["COOKIE"][x.get("key")] = x.text
85 elif key.endswith("request_id"):
86 error_dict["request_id"] = node.text
87 elif key.endswith("session"):
88 error_dict["request"]["SESSION"] = {}
79 error_dict['request']['COOKIE'][x.get('key')] = x.text
80 elif key.endswith('request_id'):
81 error_dict['request_id'] = node.text
82 elif key.endswith('session'):
83 error_dict['request']['SESSION'] = {}
89 84 for x in node:
90 error_dict["request"]["SESSION"][x.get("key")] = x.text
85 error_dict['request']['SESSION'][x.get('key')] = x.text
91 86 else:
92 if key in ["rack.session.options"]:
87 if key in ['rack.session.options']:
93 88 # skip secret configs
94 89 continue
95 90 try:
96 91 if len(node):
97 error_dict["request"][key] = dict(
98 [(x.get("key"), x.text) for x in node]
99 )
92 error_dict['request'][key] = dict(
93 [(x.get('key'), x.text,) for x in node])
100 94 else:
101 error_dict["request"][key] = node.text
95 error_dict['request'][key] = node.text
102 96 except Exception as e:
103 log.warning("Airbrake integration exception: %s" % e)
97 log.warning('Airbrake integration exception: %s' % e)
104 98
105 error_dict["request"].pop("HTTP_COOKIE", "")
99 error_dict['request'].pop('HTTP_COOKIE', '')
106 100
107 error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
108 error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
109 if "request_id" not in error_dict:
110 error_dict["request_id"] = str(uuid.uuid4())
101 error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
102 error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
103 if 'request_id' not in error_dict:
104 error_dict['request_id'] = str(uuid.uuid4())
111 105 if request.context.possibly_public:
112 106 # set ip for reports that come from airbrake js client
113 107 error_dict["timestamp"] = datetime.utcnow()
114 108 if request.environ.get("HTTP_X_FORWARDED_FOR"):
115 ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
116 first_ip = ip.split(",")[0]
109 ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
110 first_ip = ip.split(',')[0]
117 111 remote_addr = first_ip.strip()
118 112 else:
119 remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
120 "REMOTE_ADDR"
121 )
113 remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
114 request.environ.get('REMOTE_ADDR'))
122 115 error_dict["ip"] = remote_addr
123 116
124 blacklist = [
125 "password",
126 "passwd",
127 "pwd",
128 "auth_tkt",
129 "secret",
130 "csrf",
131 "session",
132 "test",
133 ]
117 blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
118 'session', 'test']
134 119
135 120 lines = []
136 for l in error.find("backtrace"):
137 lines.append(
138 {
139 "file": l.get("file", ""),
140 "line": l.get("number", ""),
141 "fn": l.get("method", ""),
142 "module": l.get("module", ""),
143 "cline": l.get("method", ""),
144 "vars": {},
145 }
146 )
147 error_dict["traceback"] = list(reversed(lines))
121 for l in error.find('backtrace'):
122 lines.append({'file': l.get("file", ""),
123 'line': l.get("number", ""),
124 'fn': l.get("method", ""),
125 'module': l.get("module", ""),
126 'cline': l.get("method", ""),
127 'vars': {}})
128 error_dict['traceback'] = list(reversed(lines))
148 129 # filtering is not provided by airbrake
149 130 keys_to_check = (
150 error_dict["request"].get("COOKIE"),
151 error_dict["request"].get("COOKIES"),
152 error_dict["request"].get("POST"),
153 error_dict["request"].get("SESSION"),
131 error_dict['request'].get('COOKIE'),
132 error_dict['request'].get('COOKIES'),
133 error_dict['request'].get('POST'),
134 error_dict['request'].get('SESSION'),
154 135 )
155 136 for source in [_f for _f in keys_to_check if _f]:
156 137 for k in source.keys():
157 138 for bad_key in blacklist:
158 139 if bad_key in k.lower():
159 source[k] = "***"
140 source[k] = '***'
160 141
161 142 return error_dict
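
A minimal Airbrake v2-style payload exercising the fields the parser reads; the structure is reconstructed from the find()/findtext() calls above, not from an official schema.

import xml.etree.ElementTree as ET

xml = """<notice>
  <error>
    <class>RuntimeError</class>
    <message>boom</message>
    <backtrace>
      <line file="app.rb" number="10" method="index" module="Main"/>
    </backtrace>
  </error>
  <notifier><name>airbrake-ruby</name></notifier>
  <server-environment><hostname>web1</hostname></server-environment>
  <request>
    <url>http://example.com/pages</url>
    <component>pages</component>
    <action>index</action>
    <cgi-data>
      <var key="HTTP_USER_AGENT">curl/7.61</var>
      <var key="action_dispatch.request_parameters">
        <var key="password">hunter2</var>
      </var>
    </cgi-data>
  </request>
</notice>"""
root = ET.fromstring(xml)
# the nested request_parameters var lands in error_dict['request']['POST'],
# where the blacklist pass scrubs 'password' to '***'
assert root.find('error').findtext('class') == 'RuntimeError'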
@@ -1,56 +1,56 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 from datetime import tzinfo, timedelta, datetime
18 18 from dateutil.relativedelta import relativedelta
19 19 import logging
20 20
21 21 log = logging.getLogger(__name__)
22 22
23 23
24 24 def to_relativedelta(time_delta):
25 return relativedelta(
26 seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
27 )
25 return relativedelta(seconds=int(time_delta.total_seconds()),
26 microseconds=time_delta.microseconds)
28 27
29 28
30 def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
29 def convert_date(date_str, return_utcnow_if_wrong=True,
30 normalize_future=False):
31 31 utcnow = datetime.utcnow()
32 32 if isinstance(date_str, datetime):
33 33 # get rid of tzinfo
34 34 return date_str.replace(tzinfo=None)
35 35 if not date_str and return_utcnow_if_wrong:
36 36 return utcnow
37 37 try:
38 38 try:
39 if "Z" in date_str:
40 date_str = date_str[: date_str.index("Z")]
41 if "." in date_str:
42 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
39 if 'Z' in date_str:
40 date_str = date_str[:date_str.index('Z')]
41 if '.' in date_str:
42 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f')
43 43 else:
44 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
44 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S')
45 45 except Exception:
46 46 # bw compat with old client
47 date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
47 date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S,%f')
48 48 except Exception:
49 49 if return_utcnow_if_wrong:
50 50 date = utcnow
51 51 else:
52 52 date = None
53 53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
54 log.warning("time %s in future + 3 min, normalizing" % date)
54 log.warning('time %s in future + 3 min, normalizing' % date)
55 55 return utcnow
56 56 return date
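
A quick check of the timestamp formats accepted above, per the strptime calls (import path assumed):

from appenlight.lib.utils.date_utils import convert_date  # assumed module path

print(convert_date('2018-10-13T19:27:30.123456Z'))  # ISO 8601 with Z suffix
print(convert_date('2018-10-13 19:27:30,123'))      # legacy client format
print(convert_date('not-a-date'))                   # falls back to utcnow()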
@@ -1,317 +1,296 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 from datetime import timedelta
18 18
19 19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
20 20
21 21 EXCLUDED_LOG_VARS = [
22 "args",
23 "asctime",
24 "created",
25 "exc_info",
26 "exc_text",
27 "filename",
28 "funcName",
29 "levelname",
30 "levelno",
31 "lineno",
32 "message",
33 "module",
34 "msecs",
35 "msg",
36 "name",
37 "pathname",
38 "process",
39 "processName",
40 "relativeCreated",
41 "thread",
42 "threadName",
43 ]
22 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
23 'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
24 'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
25 'thread', 'threadName']
44 26
45 27 EXCLUDE_SENTRY_KEYS = [
46 "csp",
47 "culprit",
48 "event_id",
49 "exception",
50 "extra",
51 "level",
52 "logentry",
53 "logger",
54 "message",
55 "modules",
56 "platform",
57 "query",
58 "release",
59 "request",
60 "sentry.interfaces.Csp",
61 "sentry.interfaces.Exception",
62 "sentry.interfaces.Http",
63 "sentry.interfaces.Message",
64 "sentry.interfaces.Query",
65 "sentry.interfaces.Stacktrace",
66 "sentry.interfaces.Template",
67 "sentry.interfaces.User",
68 "sentry.interfaces.csp.Csp",
69 "sentry.interfaces.exception.Exception",
70 "sentry.interfaces.http.Http",
71 "sentry.interfaces.message.Message",
72 "sentry.interfaces.query.Query",
73 "sentry.interfaces.stacktrace.Stacktrace",
74 "sentry.interfaces.template.Template",
75 "sentry.interfaces.user.User",
76 "server_name",
77 "stacktrace",
78 "tags",
79 "template",
80 "time_spent",
81 "timestamp",
82 "user",
83 ]
28 'csp',
29 'culprit',
30 'event_id',
31 'exception',
32 'extra',
33 'level',
34 'logentry',
35 'logger',
36 'message',
37 'modules',
38 'platform',
39 'query',
40 'release',
41 'request',
42 'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
43 'sentry.interfaces.Http', 'sentry.interfaces.Message',
44 'sentry.interfaces.Query',
45 'sentry.interfaces.Stacktrace',
46 'sentry.interfaces.Template', 'sentry.interfaces.User',
47 'sentry.interfaces.csp.Csp',
48 'sentry.interfaces.exception.Exception',
49 'sentry.interfaces.http.Http',
50 'sentry.interfaces.message.Message',
51 'sentry.interfaces.query.Query',
52 'sentry.interfaces.stacktrace.Stacktrace',
53 'sentry.interfaces.template.Template',
54 'sentry.interfaces.user.User', 'server_name',
55 'stacktrace',
56 'tags',
57 'template',
58 'time_spent',
59 'timestamp',
60 'user']
84 61
85 62
86 63 def get_keys(list_of_keys, json_body):
87 64 for k in list_of_keys:
88 65 if k in json_body:
89 66 return json_body[k]
90 67
91 68
92 69 def get_logentry(json_body):
93 key_names = [
94 "logentry",
95 "sentry.interfaces.message.Message",
96 "sentry.interfaces.Message",
97 ]
70 key_names = ['logentry',
71 'sentry.interfaces.message.Message',
72 'sentry.interfaces.Message'
73 ]
98 74 logentry = get_keys(key_names, json_body)
99 75 return logentry
100 76
101 77
102 78 def get_exception(json_body):
103 79 parsed_exception = {}
104 key_names = [
105 "exception",
106 "sentry.interfaces.exception.Exception",
107 "sentry.interfaces.Exception",
108 ]
80 key_names = ['exception',
81 'sentry.interfaces.exception.Exception',
82 'sentry.interfaces.Exception'
83 ]
109 84 exception = get_keys(key_names, json_body) or {}
110 85 if exception:
111 86 if isinstance(exception, dict):
112 exception = exception["values"][0]
87 exception = exception['values'][0]
113 88 else:
114 89 exception = exception[0]
115 90
116 parsed_exception["type"] = exception.get("type")
117 parsed_exception["value"] = exception.get("value")
118 parsed_exception["module"] = exception.get("module")
91 parsed_exception['type'] = exception.get('type')
92 parsed_exception['value'] = exception.get('value')
93 parsed_exception['module'] = exception.get('module')
119 94 parsed_stacktrace = get_stacktrace(exception) or {}
120 95 parsed_exception = exception or {}
121 96 return parsed_exception, parsed_stacktrace
122 97
123 98
124 99 def get_stacktrace(json_body):
125 100 parsed_stacktrace = []
126 key_names = [
127 "stacktrace",
128 "sentry.interfaces.stacktrace.Stacktrace",
129 "sentry.interfaces.Stacktrace",
130 ]
101 key_names = ['stacktrace',
102 'sentry.interfaces.stacktrace.Stacktrace',
103 'sentry.interfaces.Stacktrace'
104 ]
131 105 stacktrace = get_keys(key_names, json_body)
132 106 if stacktrace:
133 for frame in stacktrace["frames"]:
107 for frame in stacktrace['frames']:
134 108 parsed_stacktrace.append(
135 {
136 "cline": frame.get("context_line", ""),
137 "file": frame.get("filename", ""),
138 "module": frame.get("module", ""),
139 "fn": frame.get("function", ""),
140 "line": frame.get("lineno", ""),
141 "vars": list(frame.get("vars", {}).items()),
142 }
109 {"cline": frame.get('context_line', ''),
110 "file": frame.get('filename', ''),
111 "module": frame.get('module', ''),
112 "fn": frame.get('function', ''),
113 "line": frame.get('lineno', ''),
114 "vars": list(frame.get('vars', {}).items())
115 }
143 116 )
144 117 return parsed_stacktrace
145 118
146 119
147 120 def get_template(json_body):
148 121 parsed_template = {}
149 key_names = [
150 "template",
151 "sentry.interfaces.template.Template",
152 "sentry.interfaces.Template",
153 ]
122 key_names = ['template',
123 'sentry.interfaces.template.Template',
124 'sentry.interfaces.Template'
125 ]
154 126 template = get_keys(key_names, json_body)
155 127 if template:
156 for frame in template["frames"]:
128 for frame in template['frames']:
157 129 parsed_template.append(
158 {
159 "cline": frame.get("context_line", ""),
160 "file": frame.get("filename", ""),
161 "fn": "",
162 "line": frame.get("lineno", ""),
163 "vars": [],
164 }
130 {"cline": frame.get('context_line', ''),
131 "file": frame.get('filename', ''),
132 "fn": '',
133 "line": frame.get('lineno', ''),
134 "vars": []
135 }
165 136 )
166 137
167 138 return parsed_template
168 139
169 140
170 141 def get_request(json_body):
171 142 parsed_http = {}
172 key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
143 key_names = ['request',
144 'sentry.interfaces.http.Http',
145 'sentry.interfaces.Http'
146 ]
173 147 http = get_keys(key_names, json_body) or {}
174 148 for k, v in http.items():
175 if k == "headers":
176 parsed_http["headers"] = {}
177 for sk, sv in http["headers"].items():
178 parsed_http["headers"][sk.title()] = sv
149 if k == 'headers':
150 parsed_http['headers'] = {}
151 for sk, sv in http['headers'].items():
152 parsed_http['headers'][sk.title()] = sv
179 153 else:
180 154 parsed_http[k.lower()] = v
181 155 return parsed_http
182 156
183 157
184 158 def get_user(json_body):
185 159 parsed_user = {}
186 key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
160 key_names = ['user',
161 'sentry.interfaces.user.User',
162 'sentry.interfaces.User'
163 ]
187 164 user = get_keys(key_names, json_body)
188 165 if user:
189 parsed_user["id"] = user.get("id")
190 parsed_user["username"] = user.get("username")
191 parsed_user["email"] = user.get("email")
192 parsed_user["ip_address"] = user.get("ip_address")
166 parsed_user['id'] = user.get('id')
167 parsed_user['username'] = user.get('username')
168 parsed_user['email'] = user.get('email')
169 parsed_user['ip_address'] = user.get('ip_address')
193 170
194 171 return parsed_user
195 172
196 173
197 174 def get_query(json_body):
198 175 query = None
199 key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
176 key_name = ['query',
177 'sentry.interfaces.query.Query',
178 'sentry.interfaces.Query'
179 ]
200 180 query = get_keys(key_name, json_body)
201 181 return query
202 182
203 183
204 184 def parse_sentry_event(json_body):
205 request_id = json_body.get("event_id")
185 request_id = json_body.get('event_id')
206 186
207 187 # required
208 message = json_body.get("message")
209 log_timestamp = json_body.get("timestamp")
210 level = json_body.get("level")
188 message = json_body.get('message')
189 log_timestamp = json_body.get('timestamp')
190 level = json_body.get('level')
211 191 if isinstance(level, int):
212 192 level = LogLevelPython.key_from_value(level)
213 193
214 namespace = json_body.get("logger")
215 language = json_body.get("platform")
194 namespace = json_body.get('logger')
195 language = json_body.get('platform')
216 196
217 197 # optional
218 server_name = json_body.get("server_name")
219 culprit = json_body.get("culprit")
220 release = json_body.get("release")
198 server_name = json_body.get('server_name')
199 culprit = json_body.get('culprit')
200 release = json_body.get('release')
221 201
222 tags = json_body.get("tags", {})
223 if hasattr(tags, "items"):
202 tags = json_body.get('tags', {})
203 if hasattr(tags, 'items'):
224 204 tags = list(tags.items())
225 extra = json_body.get("extra", {})
226 if hasattr(extra, "items"):
205 extra = json_body.get('extra', {})
206 if hasattr(extra, 'items'):
227 207 extra = list(extra.items())
228 208
229 209 parsed_req = get_request(json_body)
230 210 user = get_user(json_body)
231 211 template = get_template(json_body)
232 212 query = get_query(json_body)
233 213
234 214 # other unidentified keys found
235 other_keys = [
236 (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
237 ]
215 other_keys = [(k, json_body[k]) for k in json_body.keys()
216 if k not in EXCLUDE_SENTRY_KEYS]
238 217
239 218 logentry = get_logentry(json_body)
240 219 if logentry:
241 message = logentry["message"]
220 message = logentry['message']
242 221
243 222 exception, stacktrace = get_exception(json_body)
244 223
245 224 alt_stacktrace = get_stacktrace(json_body)
246 225 event_type = None
247 226 if not exception and not stacktrace and not alt_stacktrace and not template:
248 227 event_type = ParsedSentryEventType.LOG
249 228
250 229 event_dict = {
251 "log_level": level,
252 "message": message,
253 "namespace": namespace,
254 "request_id": request_id,
255 "server": server_name,
256 "date": log_timestamp,
257 "tags": tags,
230 'log_level': level,
231 'message': message,
232 'namespace': namespace,
233 'request_id': request_id,
234 'server': server_name,
235 'date': log_timestamp,
236 'tags': tags
258 237 }
259 event_dict["tags"].extend(
260 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
261 )
238 event_dict['tags'].extend(
239 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])
262 240
263 241 # other keys can be various object types
264 event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
242 event_dict['tags'].extend([(k, v) for k, v in other_keys
243 if isinstance(v, str)])
265 244 if culprit:
266 event_dict["tags"].append(("sentry_culprit", culprit))
245 event_dict['tags'].append(('sentry_culprit', culprit))
267 246 if language:
268 event_dict["tags"].append(("sentry_language", language))
247 event_dict['tags'].append(('sentry_language', language))
269 248 if release:
270 event_dict["tags"].append(("sentry_release", release))
249 event_dict['tags'].append(('sentry_release', release))
271 250
272 251 if exception or stacktrace or alt_stacktrace or template:
273 252 event_type = ParsedSentryEventType.ERROR_REPORT
274 253 event_dict = {
275 "client": "sentry",
276 "error": message,
277 "namespace": namespace,
278 "request_id": request_id,
279 "server": server_name,
280 "start_time": log_timestamp,
281 "end_time": None,
282 "tags": tags,
283 "extra": extra,
284 "language": language,
285 "view_name": json_body.get("culprit"),
286 "http_status": None,
287 "username": None,
288 "url": parsed_req.get("url"),
289 "ip": None,
290 "user_agent": None,
291 "request": None,
292 "slow_calls": None,
293 "request_stats": None,
294 "traceback": None,
254 'client': 'sentry',
255 'error': message,
256 'namespace': namespace,
257 'request_id': request_id,
258 'server': server_name,
259 'start_time': log_timestamp,
260 'end_time': None,
261 'tags': tags,
262 'extra': extra,
263 'language': language,
264 'view_name': json_body.get('culprit'),
265 'http_status': None,
266 'username': None,
267 'url': parsed_req.get('url'),
268 'ip': None,
269 'user_agent': None,
270 'request': None,
271 'slow_calls': None,
272 'request_stats': None,
273 'traceback': None
295 274 }
296 275
297 event_dict["extra"].extend(other_keys)
276 event_dict['extra'].extend(other_keys)
298 277 if release:
299 event_dict["tags"].append(("sentry_release", release))
300 event_dict["request"] = parsed_req
301 if "headers" in parsed_req:
302 event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
303 if "env" in parsed_req:
304 event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
305 ts_ms = int(json_body.get("time_spent") or 0)
278 event_dict['tags'].append(('sentry_release', release))
279 event_dict['request'] = parsed_req
280 if 'headers' in parsed_req:
281 event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
282 if 'env' in parsed_req:
283 event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
284 ts_ms = int(json_body.get('time_spent') or 0)
306 285 if ts_ms > 0:
307 event_dict["end_time"] = event_dict["start_time"] + timedelta(
308 milliseconds=ts_ms
309 )
286 event_dict['end_time'] = event_dict['start_time'] + \
287 timedelta(milliseconds=ts_ms)
310 288 if stacktrace or alt_stacktrace or template:
311 event_dict["traceback"] = stacktrace or alt_stacktrace or template
289 event_dict['traceback'] = stacktrace or alt_stacktrace or template
312 290 for k in list(event_dict.keys()):
313 291 if event_dict[k] is None:
314 292 del event_dict[k]
315 293 if user:
316 event_dict["username"] = user["username"] or user["id"] or user["email"]
294 event_dict['username'] = user['username'] or user['id'] \
295 or user['email']
317 296 return event_dict, event_type
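
To make the two return shapes above concrete, a hedged sketch of feeding parse_sentry_event a minimal payload (field values are invented; assumes the parser module above is importable):

payload = {
    'event_id': 'abc123',
    'message': 'ValueError: boom',
    'timestamp': '2016-06-29T12:00:00',
    'level': 40,                 # numeric levels go through LogLevelPython
    'logger': 'myapp.views',
    'platform': 'python',
    'exception': {'values': [{'type': 'ValueError', 'value': 'boom'}]},
}
event_dict, event_type = parse_sentry_event(payload)
# With an exception present, event_type is ParsedSentryEventType.ERROR_REPORT
# and event_dict carries 'client': 'sentry', 'error', 'request_id', etc.
# Drop the 'exception' key and the same payload comes back as a plain
# ParsedSentryEventType.LOG entry with log_level/message/namespace fields.
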
@@ -1,15 +1,17 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
17
@@ -1,102 +1,98 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 from alembic import context
18 18 from sqlalchemy import engine_from_config, pool, MetaData
19 19 from logging.config import fileConfig
20 20 from appenlight.models import NAMING_CONVENTION
21 21
22 22 # this is the Alembic Config object, which provides
23 23 # access to the values within the .ini file in use.
24 24 config = context.config
25 25
26 26 # Interpret the config file for Python logging.
27 27 # This line sets up loggers basically.
28 28 if config.config_file_name:
29 29 fileConfig(config.config_file_name)
30 30
31 31 # add your model's MetaData object here
32 32 # for 'autogenerate' support
33 33 # from myapp import mymodel
34 34 # target_metadata = mymodel.Base.metadata
35 35
36 36
37 37 target_metadata = MetaData(naming_convention=NAMING_CONVENTION)
38 38
39 39 # other values from the config, defined by the needs of env.py,
40 40 # can be acquired:
41 41 # my_important_option = config.get_main_option("my_important_option")
42 42 # ... etc.
43 43
44 VERSION_TABLE_NAME = "alembic_appenlight_version"
44 VERSION_TABLE_NAME = 'alembic_appenlight_version'
45 45
46 46
47 47 def run_migrations_offline():
48 48 """Run migrations in 'offline' mode.
49 49
50 50 This configures the context with just a URL
51 51 and not an Engine, though an Engine is acceptable
52 52 here as well. By skipping the Engine creation
53 53 we don't even need a DBAPI to be available.
54 54
55 55 Calls to context.execute() here emit the given string to the
56 56 script output.
57 57
58 58 """
59 59 url = config.get_main_option("sqlalchemy.url")
60 context.configure(
61 url=url,
62 target_metadata=target_metadata,
63 transaction_per_migration=True,
64 version_table=VERSION_TABLE_NAME,
65 )
60 context.configure(url=url, target_metadata=target_metadata,
61 transaction_per_migration=True,
62 version_table=VERSION_TABLE_NAME)
66 63
67 64 with context.begin_transaction():
68 65 context.run_migrations()
69 66
70 67
71 68 def run_migrations_online():
72 69 """Run migrations in 'online' mode.
73 70
74 71 In this scenario we need to create an Engine
75 72 and associate a connection with the context.
76 73
77 74 """
78 75 engine = engine_from_config(
79 76 config.get_section(config.config_ini_section),
80 prefix="sqlalchemy.",
81 poolclass=pool.NullPool,
82 )
77 prefix='sqlalchemy.',
78 poolclass=pool.NullPool)
83 79
84 80 connection = engine.connect()
85 81 context.configure(
86 82 connection=connection,
87 83 target_metadata=target_metadata,
88 84 transaction_per_migration=True,
89 version_table=VERSION_TABLE_NAME,
85 version_table=VERSION_TABLE_NAME
90 86 )
91 87
92 88 try:
93 89 with context.begin_transaction():
94 90 context.run_migrations()
95 91 finally:
96 92 connection.close()
97 93
98 94
99 95 if context.is_offline_mode():
100 96 run_migrations_offline()
101 97 else:
102 98 run_migrations_online()
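
env.py above dispatches on context.is_offline_mode(); a hedged sketch of driving these migrations through Alembic's programmatic API (the ini filename is an assumption — it only has to supply sqlalchemy.url and script_location):

from alembic.config import Config
from alembic import command

cfg = Config('appenlight.ini')
command.upgrade(cfg, 'head')             # online: runs run_migrations_online()
command.upgrade(cfg, 'head', sql=True)   # offline: emits SQL via run_migrations_offline()
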
This diff has been collapsed as it changes many lines (789 lines changed).
@@ -1,813 +1,624 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 """initial tables
18 18
19 19 Revision ID: 55b6e612672f
20 20 Revises: None
21 21 Create Date: 2014-10-13 23:47:38.295159
22 22
23 23 """
24 24
25 25 # revision identifiers, used by Alembic.
26 revision = "55b6e612672f"
26 revision = '55b6e612672f'
27 27 down_revision = None
28 28
29 29 from alembic import op
30 30 import sqlalchemy as sa
31 31
32 32
33 33 def upgrade():
34 op.add_column("users", sa.Column("first_name", sa.Unicode(25)))
35 op.add_column("users", sa.Column("last_name", sa.Unicode(50)))
36 op.add_column("users", sa.Column("company_name", sa.Unicode(255)))
37 op.add_column("users", sa.Column("company_address", sa.Unicode(255)))
38 op.add_column("users", sa.Column("phone1", sa.Unicode(25)))
39 op.add_column("users", sa.Column("phone2", sa.Unicode(25)))
40 op.add_column("users", sa.Column("zip_code", sa.Unicode(25)))
41 op.add_column(
42 "users",
43 sa.Column(
44 "default_report_sort",
45 sa.Unicode(20),
46 nullable=False,
47 server_default="newest",
48 ),
49 )
50 op.add_column("users", sa.Column("city", sa.Unicode(128)))
51 op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default=""))
52 op.add_column(
53 "users",
54 sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"),
55 )
56 op.add_column(
57 "users",
58 sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""),
59 )
34 op.add_column('users', sa.Column('first_name', sa.Unicode(25)))
35 op.add_column('users', sa.Column('last_name', sa.Unicode(50)))
36 op.add_column('users', sa.Column('company_name', sa.Unicode(255)))
37 op.add_column('users', sa.Column('company_address', sa.Unicode(255)))
38 op.add_column('users', sa.Column('phone1', sa.Unicode(25)))
39 op.add_column('users', sa.Column('phone2', sa.Unicode(25)))
40 op.add_column('users', sa.Column('zip_code', sa.Unicode(25)))
41 op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest"))
42 op.add_column('users', sa.Column('city', sa.Unicode(128)))
43 op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default=''))
44 op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true'))
45 op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default=''))
60 46
61 47 op.create_table(
62 "integrations",
63 sa.Column("id", sa.Integer(), primary_key=True),
64 sa.Column(
65 "resource_id",
66 sa.Integer(),
67 sa.ForeignKey(
68 "resources.resource_id", onupdate="cascade", ondelete="cascade"
69 ),
70 ),
71 sa.Column("integration_name", sa.Unicode(64)),
72 sa.Column("config", sa.dialects.postgresql.JSON, nullable=False),
73 sa.Column(
74 "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now()
75 ),
76 sa.Column("external_id", sa.Unicode(255)),
77 sa.Column("external_id2", sa.Unicode(255)),
48 'integrations',
49 sa.Column('id', sa.Integer(), primary_key=True),
50 sa.Column('resource_id', sa.Integer(),
51 sa.ForeignKey('resources.resource_id', onupdate='cascade',
52 ondelete='cascade')),
53 sa.Column('integration_name', sa.Unicode(64)),
54 sa.Column('config', sa.dialects.postgresql.JSON, nullable=False),
55 sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()),
56 sa.Column('external_id', sa.Unicode(255)),
57 sa.Column('external_id2', sa.Unicode(255))
78 58 )
79 59
80 60 op.create_table(
81 "alert_channels",
82 sa.Column(
83 "owner_id",
84 sa.Integer(),
85 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
86 nullable=False,
87 ),
88 sa.Column("channel_name", sa.Unicode(25), nullable=False),
89 sa.Column("channel_value", sa.Unicode(80), nullable=False),
90 sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False),
91 sa.Column(
92 "channel_validated", sa.Boolean, nullable=False, server_default="False"
93 ),
94 sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"),
95 sa.Column(
96 "notify_only_first", sa.Boolean, nullable=False, server_default="False"
97 ),
98 sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"),
99 sa.Column("pkey", sa.Integer(), primary_key=True),
100 sa.Column(
101 "integration_id",
102 sa.Integer,
103 sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"),
104 ),
105 )
106 op.create_unique_constraint(
107 "uq_alert_channels",
108 "alert_channels",
109 ["owner_id", "channel_name", "channel_value"],
110 )
61 'alert_channels',
62 sa.Column('owner_id', sa.Integer(),
63 sa.ForeignKey('users.id', onupdate='cascade',
64 ondelete='cascade'), nullable=False),
65 sa.Column('channel_name', sa.Unicode(25), nullable=False),
66 sa.Column('channel_value', sa.Unicode(80), nullable=False),
67 sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False),
68 sa.Column('channel_validated', sa.Boolean, nullable=False, server_default='False'),
69 sa.Column('send_alerts', sa.Boolean, nullable=False, server_default='True'),
70 sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'),
71 sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'),
72 sa.Column('pkey', sa.Integer(), primary_key=True),
73 sa.Column('integration_id', sa.Integer,
74 sa.ForeignKey('integrations.id', onupdate='cascade',
75 ondelete='cascade')),
76 )
77 op.create_unique_constraint('uq_alert_channels', 'alert_channels',
78 ["owner_id", "channel_name", "channel_value"])
111 79
112 80 op.create_table(
113 "alert_channels_actions",
114 sa.Column("owner_id", sa.Integer(), nullable=False),
115 sa.Column(
116 "resource_id",
117 sa.Integer(),
118 sa.ForeignKey(
119 "resources.resource_id", onupdate="cascade", ondelete="cascade"
120 ),
121 ),
122 sa.Column("pkey", sa.Integer(), primary_key=True),
123 sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"),
124 sa.Column("rule", sa.dialects.postgresql.JSON),
125 sa.Column("type", sa.Unicode(10), index=True),
126 sa.Column("other_id", sa.Unicode(40), index=True),
127 sa.Column("config", sa.dialects.postgresql.JSON),
128 sa.Column("name", sa.Unicode(255), server_default=""),
81 'alert_channels_actions',
82 sa.Column('owner_id', sa.Integer(), nullable=False),
83 sa.Column('resource_id', sa.Integer(),
84 sa.ForeignKey('resources.resource_id', onupdate='cascade',
85 ondelete='cascade')),
86 sa.Column('pkey', sa.Integer(), primary_key=True),
87 sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'),
88 sa.Column('rule', sa.dialects.postgresql.JSON),
89 sa.Column('type', sa.Unicode(10), index=True),
90 sa.Column('other_id', sa.Unicode(40), index=True),
91 sa.Column('config', sa.dialects.postgresql.JSON),
92 sa.Column('name', sa.Unicode(255), server_default='')
129 93 )
130 94
95
131 96 op.create_table(
132 "application_postprocess_conf",
133 sa.Column("pkey", sa.Integer(), primary_key=True),
134 sa.Column("do", sa.Unicode(25), nullable=False),
135 sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""),
136 sa.Column(
137 "resource_id",
138 sa.Integer(),
139 sa.ForeignKey(
140 "resources.resource_id", onupdate="cascade", ondelete="cascade"
141 ),
142 nullable=False,
143 ),
144 sa.Column("rule", sa.dialects.postgresql.JSON),
97 'application_postprocess_conf',
98 sa.Column('pkey', sa.Integer(), primary_key=True),
99 sa.Column('do', sa.Unicode(25), nullable=False),
100 sa.Column('new_value', sa.UnicodeText(), nullable=False, server_default=''),
101 sa.Column('resource_id', sa.Integer(),
102 sa.ForeignKey('resources.resource_id',
103 onupdate='cascade',
104 ondelete='cascade'), nullable=False),
105 sa.Column('rule', sa.dialects.postgresql.JSON),
145 106 )
146 107
147 108 op.create_table(
148 "applications",
149 sa.Column(
150 "resource_id",
151 sa.Integer(),
152 sa.ForeignKey(
153 "resources.resource_id", onupdate="cascade", ondelete="cascade"
154 ),
155 nullable=False,
156 primary_key=True,
157 autoincrement=False,
158 ),
159 sa.Column("domains", sa.UnicodeText, nullable=False),
160 sa.Column("api_key", sa.Unicode(32), nullable=False, index=True),
161 sa.Column(
162 "default_grouping",
163 sa.Unicode(20),
164 nullable=False,
165 server_default="url_type",
166 ),
167 sa.Column("public_key", sa.Unicode(32), nullable=False, index=True),
168 sa.Column(
169 "error_report_threshold", sa.Integer(), server_default="10", nullable=False
170 ),
171 sa.Column(
172 "slow_report_threshold", sa.Integer(), server_default="10", nullable=False
173 ),
174 sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False),
175 sa.Column(
176 "allow_permanent_storage",
177 sa.Boolean(),
178 server_default="false",
179 nullable=False,
180 ),
181 )
182 op.create_unique_constraint(None, "applications", ["public_key"])
183 op.create_unique_constraint(None, "applications", ["api_key"])
109 'applications',
110 sa.Column('resource_id', sa.Integer(),
111 sa.ForeignKey('resources.resource_id', onupdate='cascade',
112 ondelete='cascade'), nullable=False,
113 primary_key=True, autoincrement=False),
114 sa.Column('domains', sa.UnicodeText, nullable=False),
115 sa.Column('api_key', sa.Unicode(32), nullable=False, index=True),
116 sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'),
117 sa.Column('public_key', sa.Unicode(32), nullable=False, index=True),
118 sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False),
119 sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False),
120 sa.Column('apdex_threshold', sa.Float(), server_default='0.7', nullable=False),
121 sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False),
122 )
123 op.create_unique_constraint(None, 'applications',
124 ["public_key"])
125 op.create_unique_constraint(None, 'applications',
126 ["api_key"])
184 127
185 128 op.create_table(
186 "metrics",
187 sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True),
188 sa.Column(
189 "resource_id",
190 sa.Integer(),
191 sa.ForeignKey(
192 "resources.resource_id", onupdate="cascade", ondelete="cascade"
193 ),
194 ),
195 sa.Column("timestamp", sa.DateTime),
196 sa.Column("namespace", sa.Unicode(255)),
197 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
129 'metrics',
130 sa.Column('pkey', sa.types.BigInteger, nullable=False, primary_key=True),
131 sa.Column('resource_id', sa.Integer(),
132 sa.ForeignKey('resources.resource_id',
133 onupdate='cascade',
134 ondelete='cascade')),
135 sa.Column('timestamp', sa.DateTime),
136 sa.Column('namespace', sa.Unicode(255)),
137 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}")
198 138 )
199 139
200 140 op.create_table(
201 "events",
202 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
203 sa.Column("start_date", sa.DateTime, nullable=False, index=True),
204 sa.Column("end_date", sa.DateTime),
205 sa.Column("status", sa.Integer(), nullable=False, index=True),
206 sa.Column("event_type", sa.Integer(), nullable=False, index=True),
207 sa.Column("origin_user_id", sa.Integer()),
208 sa.Column("target_user_id", sa.Integer()),
209 sa.Column("resource_id", sa.Integer(), index=True),
210 sa.Column("text", sa.UnicodeText, server_default=""),
211 sa.Column("values", sa.dialects.postgresql.JSON),
212 sa.Column("target_id", sa.Integer()),
213 sa.Column("target_uuid", sa.Unicode(40), index=True),
141 'events',
142 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
143 sa.Column('start_date', sa.DateTime, nullable=False, index=True),
144 sa.Column('end_date', sa.DateTime),
145 sa.Column('status', sa.Integer(), nullable=False, index=True),
146 sa.Column('event_type', sa.Integer(), nullable=False, index=True),
147 sa.Column('origin_user_id', sa.Integer()),
148 sa.Column('target_user_id', sa.Integer()),
149 sa.Column('resource_id', sa.Integer(), index=True),
150 sa.Column('text', sa.UnicodeText, server_default=''),
151 sa.Column('values', sa.dialects.postgresql.JSON),
152 sa.Column('target_id', sa.Integer()),
153 sa.Column('target_uuid', sa.Unicode(40), index=True)
214 154 )
215 155
216 156 op.create_table(
217 "logs",
218 sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True),
219 sa.Column(
220 "resource_id",
221 sa.Integer(),
222 sa.ForeignKey(
223 "resources.resource_id", onupdate="cascade", ondelete="cascade"
224 ),
225 ),
226 sa.Column("log_level", sa.SmallInteger(), nullable=False),
227 sa.Column("primary_key", sa.Unicode(128), nullable=True),
228 sa.Column("message", sa.UnicodeText, nullable=False, server_default=""),
229 sa.Column("timestamp", sa.DateTime),
230 sa.Column("namespace", sa.Unicode(255)),
231 sa.Column("request_id", sa.Unicode(40)),
232 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
233 sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False),
157 'logs',
158 sa.Column('log_id', sa.types.BigInteger, nullable=False, primary_key=True),
159 sa.Column('resource_id', sa.Integer(),
160 sa.ForeignKey('resources.resource_id',
161 onupdate='cascade',
162 ondelete='cascade')),
163 sa.Column('log_level', sa.SmallInteger(), nullable=False),
164 sa.Column('primary_key', sa.Unicode(128), nullable=True),
165 sa.Column('message', sa.UnicodeText, nullable=False, server_default=''),
166 sa.Column('timestamp', sa.DateTime),
167 sa.Column('namespace', sa.Unicode(255)),
168 sa.Column('request_id', sa.Unicode(40)),
169 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"),
170 sa.Column('permanent', sa.Boolean(), server_default="false",
171 nullable=False)
234 172 )
235 173
236 174 op.create_table(
237 "reports_groups",
238 sa.Column("id", sa.types.BigInteger, primary_key=True),
239 sa.Column(
240 "resource_id",
241 sa.Integer,
242 sa.ForeignKey(
243 "resources.resource_id", onupdate="cascade", ondelete="cascade"
244 ),
245 nullable=False,
246 ),
247 sa.Column("priority", sa.Integer, nullable=False, server_default="5"),
248 sa.Column(
249 "first_timestamp",
250 sa.DateTime(),
251 nullable=False,
252 server_default=sa.func.now(),
253 ),
254 sa.Column("last_timestamp", sa.DateTime()),
255 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
256 sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""),
257 sa.Column(
258 "triggered_postprocesses_ids",
259 sa.dialects.postgresql.JSON,
260 nullable=False,
261 server_default="[]",
262 ),
263 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
264 sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"),
265 sa.Column("last_report", sa.Integer, nullable=False, server_default="0"),
266 sa.Column("occurences", sa.Integer, nullable=False, server_default="1"),
267 sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"),
268 sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"),
269 sa.Column("notified", sa.Boolean, nullable=False, server_default="False"),
270 sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"),
271 sa.Column("public", sa.Boolean, nullable=False, server_default="False"),
272 sa.Column("read", sa.Boolean, nullable=False, server_default="False"),
175 'reports_groups',
176 sa.Column('id', sa.types.BigInteger, primary_key=True),
177 sa.Column('resource_id', sa.Integer,
178 sa.ForeignKey('resources.resource_id', onupdate='cascade',
179 ondelete='cascade'), nullable=False),
180 sa.Column('priority', sa.Integer, nullable=False, server_default="5"),
181 sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()),
182 sa.Column('last_timestamp', sa.DateTime()),
183 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
184 sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""),
185 sa.Column('triggered_postprocesses_ids', sa.dialects.postgresql.JSON, nullable=False, server_default="[]"),
186 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
187 sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"),
188 sa.Column('last_report', sa.Integer, nullable=False, server_default="0"),
189 sa.Column('occurences', sa.Integer, nullable=False, server_default="1"),
190 sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"),
191 sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"),
192 sa.Column('notified', sa.Boolean, nullable=False, server_default="False"),
193 sa.Column('fixed', sa.Boolean, nullable=False, server_default="False"),
194 sa.Column('public', sa.Boolean, nullable=False, server_default="False"),
195 sa.Column('read', sa.Boolean, nullable=False, server_default="False"),
273 196 )
274 197
275 198 op.create_table(
276 "reports",
277 sa.Column("id", sa.types.BigInteger, primary_key=True),
278 sa.Column(
279 "group_id",
280 sa.types.BigInteger,
281 sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"),
282 nullable=False,
283 index=True,
284 ),
285 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
286 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
287 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
288 sa.Column(
289 "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
290 ),
291 sa.Column(
292 "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
293 ),
294 sa.Column(
295 "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
296 ),
297 sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""),
298 sa.Column("username", sa.Unicode(255), nullable=False, server_default=""),
299 sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""),
300 sa.Column("url", sa.UnicodeText, nullable=False, server_default=""),
301 sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""),
302 sa.Column(
303 "request_stats",
304 sa.dialects.postgresql.JSON,
305 nullable=False,
306 server_default="{}",
307 ),
308 sa.Column(
309 "traceback",
310 sa.dialects.postgresql.JSON,
311 nullable=False,
312 server_default="{}",
313 ),
314 sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""),
315 sa.Column(
316 "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now()
317 ),
318 sa.Column("end_time", sa.DateTime()),
319 sa.Column(
320 "report_group_time",
321 sa.DateTime,
322 index=True,
323 nullable=False,
324 server_default=sa.func.now(),
325 ),
326 sa.Column("duration", sa.Float(), nullable=False, server_default="0"),
327 sa.Column("http_status", sa.Integer, index=True),
328 sa.Column("url_domain", sa.Unicode(128)),
329 sa.Column("url_path", sa.UnicodeText),
330 sa.Column("language", sa.Integer, server_default="0"),
331 )
332 op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")])
333 op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")])
199 'reports',
200 sa.Column('id', sa.types.BigInteger, primary_key=True),
201 sa.Column('group_id', sa.types.BigInteger,
202 sa.ForeignKey('reports_groups.id', onupdate='cascade',
203 ondelete='cascade'), nullable=False, index=True),
204 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
205 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
206 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
207 sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
208 sa.Column('request', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
209 sa.Column('tags', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
210 sa.Column('ip', sa.Unicode(39), nullable=False, server_default=""),
211 sa.Column('username', sa.Unicode(255), nullable=False, server_default=""),
212 sa.Column('user_agent', sa.Unicode(512), nullable=False, server_default=""),
213 sa.Column('url', sa.UnicodeText, nullable=False, server_default=""),
214 sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""),
215 sa.Column('request_stats', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
216 sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
217 sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""),
218 sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()),
219 sa.Column('end_time', sa.DateTime()),
220 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
221 sa.Column('duration', sa.Float(), nullable=False, server_default="0"),
222 sa.Column('http_status', sa.Integer, index=True),
223 sa.Column('url_domain', sa.Unicode(128)),
224 sa.Column('url_path', sa.UnicodeText),
225 sa.Column('language', sa.Integer, server_default="0"),
226 )
227 op.create_index(None, 'reports',
228 [sa.text("(tags ->> 'server_name')")])
229 op.create_index(None, 'reports',
230 [sa.text("(tags ->> 'view_name')")])
334 231
335 232 op.create_table(
336 "reports_assignments",
337 sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True),
338 sa.Column(
339 "owner_id",
340 sa.Integer,
341 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
342 nullable=False,
343 primary_key=True,
344 ),
345 sa.Column("report_time", sa.DateTime, nullable=False),
346 )
233 'reports_assignments',
234 sa.Column('group_id', sa.types.BigInteger, nullable=False, primary_key=True),
235 sa.Column('owner_id', sa.Integer,
236 sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'),
237 nullable=False, primary_key=True),
238 sa.Column('report_time', sa.DateTime, nullable=False)
239 )
347 240
348 241 op.create_table(
349 "reports_comments",
350 sa.Column("comment_id", sa.Integer, primary_key=True),
351 sa.Column("body", sa.UnicodeText, nullable=False, server_default=""),
352 sa.Column(
353 "owner_id",
354 sa.Integer,
355 sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"),
356 nullable=True,
357 ),
358 sa.Column(
359 "created_timestamp",
360 sa.DateTime,
361 nullable=False,
362 server_default=sa.func.now(),
363 ),
364 sa.Column("report_time", sa.DateTime, nullable=False),
365 sa.Column("group_id", sa.types.BigInteger, nullable=False),
242 'reports_comments',
243 sa.Column('comment_id', sa.Integer, primary_key=True),
244 sa.Column('body', sa.UnicodeText, nullable=False, server_default=''),
245 sa.Column('owner_id', sa.Integer,
246 sa.ForeignKey('users.id', onupdate='cascade',
247 ondelete='set null'), nullable=True),
248 sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
249 sa.Column('report_time', sa.DateTime, nullable=False),
250 sa.Column('group_id', sa.types.BigInteger, nullable=False)
366 251 )
367 252
368 253 op.create_table(
369 "reports_stats",
370 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
371 sa.Column("start_interval", sa.DateTime, nullable=False, index=True),
372 sa.Column("group_id", sa.types.BigInteger, index=True),
373 sa.Column(
374 "occurences", sa.Integer, nullable=False, server_default="0", index=True
375 ),
376 sa.Column("owner_user_id", sa.Integer),
377 sa.Column("type", sa.Integer, index=True, nullable=False),
378 sa.Column("duration", sa.Float(), server_default="0"),
379 sa.Column("server_name", sa.Unicode(128), server_default=""),
380 sa.Column("view_name", sa.Unicode(128), server_default=""),
381 sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True),
382 )
383 op.create_index(
384 "ix_reports_stats_start_interval_group_id",
385 "reports_stats",
386 ["start_interval", "group_id"],
387 )
254 'reports_stats',
255 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
256 sa.Column('start_interval', sa.DateTime, nullable=False, index=True),
257 sa.Column('group_id', sa.types.BigInteger, index=True),
258 sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True),
259 sa.Column('owner_user_id', sa.Integer),
260 sa.Column('type', sa.Integer, index=True, nullable=False),
261 sa.Column('duration', sa.Float(), server_default='0'),
262 sa.Column('server_name', sa.Unicode(128),
263 server_default=''),
264 sa.Column('view_name', sa.Unicode(128),
265 server_default=''),
266 sa.Column('id', sa.BigInteger(), nullable=False, primary_key=True),
267 )
268 op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats',
269 ["start_interval", "group_id"])
388 270
389 271 op.create_table(
390 "slow_calls",
391 sa.Column("id", sa.types.BigInteger, primary_key=True),
392 sa.Column(
393 "report_id",
394 sa.types.BigInteger,
395 sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"),
396 nullable=False,
397 index=True,
398 ),
399 sa.Column(
400 "duration", sa.Float(), nullable=False, server_default="0", index=True
401 ),
402 sa.Column(
403 "timestamp",
404 sa.DateTime,
405 nullable=False,
406 server_default=sa.func.now(),
407 index=True,
408 ),
409 sa.Column(
410 "report_group_time",
411 sa.DateTime,
412 index=True,
413 nullable=False,
414 server_default=sa.func.now(),
415 ),
416 sa.Column("type", sa.Unicode(16), nullable=False, index=True),
417 sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""),
418 sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False),
419 sa.Column("location", sa.UnicodeText, server_default=""),
420 sa.Column("subtype", sa.Unicode(16), nullable=False, index=True),
421 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
422 sa.Column("statement_hash", sa.Unicode(60), index=True),
272 'slow_calls',
273 sa.Column('id', sa.types.BigInteger, primary_key=True),
274 sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'),
275 nullable=False, index=True),
276 sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True),
277 sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True),
278 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
279 sa.Column('type', sa.Unicode(16), nullable=False, index=True),
280 sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''),
281 sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False),
282 sa.Column('location', sa.UnicodeText, server_default=''),
283 sa.Column('subtype', sa.Unicode(16), nullable=False, index=True),
284 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
285 sa.Column('statement_hash', sa.Unicode(60), index=True)
423 286 )
424 287
425 288 op.create_table(
426 "tags",
427 sa.Column("id", sa.types.BigInteger, primary_key=True),
428 sa.Column(
429 "resource_id",
430 sa.Integer,
431 sa.ForeignKey(
432 "resources.resource_id", onupdate="cascade", ondelete="cascade"
433 ),
434 ),
435 sa.Column(
436 "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
437 ),
438 sa.Column(
439 "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
440 ),
441 sa.Column("name", sa.Unicode(32), nullable=False),
442 sa.Column("value", sa.dialects.postgresql.JSON, nullable=False),
443 sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"),
289 'tags',
290 sa.Column('id', sa.types.BigInteger, primary_key=True),
291 sa.Column('resource_id', sa.Integer,
292 sa.ForeignKey('resources.resource_id', onupdate='cascade',
293 ondelete='cascade')),
294 sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
295 sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
296 sa.Column('name', sa.Unicode(32), nullable=False),
297 sa.Column('value', sa.dialects.postgresql.JSON, nullable=False),
298 sa.Column('times_seen', sa.Integer, nullable=False, server_default='1')
444 299 )
445 300
446 301 op.create_table(
447 "auth_tokens",
448 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
449 sa.Column("token", sa.Unicode),
450 sa.Column(
451 "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now()
452 ),
453 sa.Column("expires", sa.DateTime),
454 sa.Column(
455 "owner_id",
456 sa.Integer,
457 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
458 ),
459 sa.Column("description", sa.Unicode),
302 'auth_tokens',
303 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
304 sa.Column('token', sa.Unicode),
305 sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()),
306 sa.Column('expires', sa.DateTime),
307 sa.Column('owner_id', sa.Integer,
308 sa.ForeignKey('users.id', onupdate='cascade',
309 ondelete='cascade')),
310 sa.Column('description', sa.Unicode),
460 311 )
461 312
462 313 op.create_table(
463 "channels_actions",
464 sa.Column(
465 "channel_pkey",
466 sa.Integer,
467 sa.ForeignKey(
468 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
469 ),
470 ),
471 sa.Column(
472 "action_pkey",
473 sa.Integer,
474 sa.ForeignKey(
475 "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE"
476 ),
477 ),
314 'channels_actions',
315 sa.Column('channel_pkey', sa.Integer,
316 sa.ForeignKey('alert_channels.pkey',
317 ondelete='CASCADE', onupdate='CASCADE')),
318 sa.Column('action_pkey', sa.Integer,
319 sa.ForeignKey('alert_channels_actions.pkey',
320 ondelete='CASCADE', onupdate='CASCADE'))
478 321 )
479 322
480 323 op.create_table(
481 "config",
482 sa.Column("key", sa.Unicode(128), primary_key=True),
483 sa.Column("section", sa.Unicode(128), primary_key=True),
484 sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"),
324 'config',
325 sa.Column('key', sa.Unicode(128), primary_key=True),
326 sa.Column('section', sa.Unicode(128), primary_key=True),
327 sa.Column('value', sa.dialects.postgresql.JSON,
328 server_default="{}")
485 329 )
486 330
487 331 op.create_table(
488 "plugin_configs",
489 sa.Column("id", sa.Integer, primary_key=True),
490 sa.Column("plugin_name", sa.Unicode(128)),
491 sa.Column("section", sa.Unicode(128)),
492 sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"),
493 sa.Column(
494 "resource_id",
495 sa.Integer(),
496 sa.ForeignKey(
497 "resources.resource_id", onupdate="cascade", ondelete="cascade"
498 ),
499 ),
500 sa.Column(
501 "owner_id",
502 sa.Integer(),
503 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
504 ),
505 )
332 'plugin_configs',
333 sa.Column('id', sa.Integer, primary_key=True),
334 sa.Column('plugin_name', sa.Unicode(128)),
335 sa.Column('section', sa.Unicode(128)),
336 sa.Column('config', sa.dialects.postgresql.JSON,
337 server_default="{}"),
338 sa.Column('resource_id', sa.Integer(),
339 sa.ForeignKey('resources.resource_id', onupdate='cascade',
340 ondelete='cascade')),
341 sa.Column('owner_id', sa.Integer(),
342 sa.ForeignKey('users.id', onupdate='cascade',
343 ondelete='cascade')))
506 344
507 345 op.create_table(
508 "rc_versions",
509 sa.Column("name", sa.Unicode(40), primary_key=True),
510 sa.Column("value", sa.Unicode(40)),
511 )
512 version_table = sa.table(
513 "rc_versions",
514 sa.Column("name", sa.Unicode(40)),
515 sa.Column("value", sa.Unicode(40)),
346 'rc_versions',
347 sa.Column('name', sa.Unicode(40), primary_key=True),
348 sa.Column('value', sa.Unicode(40)),
516 349 )
350 version_table = sa.table('rc_versions',
351 sa.Column('name', sa.Unicode(40)),
352 sa.Column('value', sa.Unicode(40)))
517 353
518 insert = version_table.insert().values(name="es_reports")
354 insert = version_table.insert().values(name='es_reports')
519 355 op.execute(insert)
520 insert = version_table.insert().values(name="es_reports_groups")
356 insert = version_table.insert().values(name='es_reports_groups')
521 357 op.execute(insert)
522 insert = version_table.insert().values(name="es_reports_stats")
358 insert = version_table.insert().values(name='es_reports_stats')
523 359 op.execute(insert)
524 insert = version_table.insert().values(name="es_logs")
360 insert = version_table.insert().values(name='es_logs')
525 361 op.execute(insert)
526 insert = version_table.insert().values(name="es_metrics")
362 insert = version_table.insert().values(name='es_metrics')
527 363 op.execute(insert)
528 insert = version_table.insert().values(name="es_slow_calls")
364 insert = version_table.insert().values(name='es_slow_calls')
529 365 op.execute(insert)
530 366
531 op.execute(
532 """
367
368 op.execute('''
533 369 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
534 370 RETURNS timestamp without time zone AS
535 371 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
536 372 LANGUAGE sql VOLATILE;
537 """
538 )
373 ''')
539 374
540 op.execute(
541 """
375 op.execute('''
542 376 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
543 377 LANGUAGE plpgsql SECURITY DEFINER
544 378 AS $$
545 379 DECLARE
546 380 main_table varchar := 'logs';
547 381 partitioned_table varchar := '';
548 382 BEGIN
549 383
550 384 IF NEW.permanent THEN
551 385 partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp);
552 386 ELSE
553 387 partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp);
554 388 END IF;
555 389
556 390 BEGIN
557 391 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
558 392 EXCEPTION
559 393 WHEN undefined_table THEN
560 394 RAISE NOTICE 'A partition has been created %', partitioned_table;
561 395 IF NEW.permanent THEN
562 396 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)',
563 397 partitioned_table,
564 398 quote_literal(date_trunc('month', NEW.timestamp)::date) ,
565 399 quote_literal((date_trunc('month', NEW.timestamp)::date + interval '1 month')::text),
566 400 main_table);
567 401 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(log_id);', partitioned_table, partitioned_table);
568 402 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table);
569 403 EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table);
570 404 EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table);
571 405 EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table);
572 406 EXECUTE format('CREATE INDEX ix_%s_pkey_namespace ON %s (primary_key, namespace);', partitioned_table, partitioned_table);
573 407 ELSE
574 408 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)',
575 409 partitioned_table,
576 410 quote_literal(date_trunc('day', NEW.timestamp)::date) ,
577 411 quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text),
578 412 main_table);
579 413 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s_ PRIMARY KEY(log_id);', partitioned_table, partitioned_table);
580 414 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table);
581 415 EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table);
582 416 EXECUTE format('CREATE INDEX ix_%s_namespace_resource_id ON %s (namespace, resource_id);', partitioned_table, partitioned_table);
583 417 EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table);
584 418 EXECUTE format('CREATE INDEX ix_%s_primary_key_namespace ON %s (primary_key,namespace);', partitioned_table, partitioned_table);
585 419 END IF;
586 420
587 421
588 422 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
589 423 END;
590 424
591 425
592 426 RETURN NULL;
593 427 END
594 428 $$;
595 """
596 )
429 ''')
597 430
598 op.execute(
599 """
431 op.execute('''
600 432 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
601 """
602 )
433 ''')
603 434
604 op.execute(
605 """
435 op.execute('''
606 436 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
607 437 LANGUAGE plpgsql SECURITY DEFINER
608 438 AS $$
609 439 DECLARE
610 440 main_table varchar := 'metrics';
611 441 partitioned_table varchar := '';
612 442 BEGIN
613 443
614 444 partitioned_table := main_table || '_p_' || date_part('year', NEW.timestamp)::TEXT || '_' || DATE_part('month', NEW.timestamp) || '_' || DATE_part('day', NEW.timestamp);
615 445
616 446 BEGIN
617 447 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
618 448 EXCEPTION
619 449 WHEN undefined_table THEN
620 450 RAISE NOTICE 'A partition has been created %', partitioned_table;
621 451 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( timestamp >= DATE %s AND timestamp < DATE %s)) INHERITS (%s)',
622 452 partitioned_table,
623 453 quote_literal(date_trunc('day', NEW.timestamp)::date) ,
624 454 quote_literal((date_trunc('day', NEW.timestamp)::date + interval '1 day')::text),
625 455 main_table);
626 456 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(pkey);', partitioned_table, partitioned_table);
627 457 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table);
628 458 EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s (timestamp);', partitioned_table, partitioned_table);
629 459 EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s (resource_id);', partitioned_table, partitioned_table);
630 460 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
631 461 END;
632 462
633 463 RETURN NULL;
634 464 END
635 465 $$;
636 """
637 )
466 ''')
638 467
639 op.execute(
640 """
468 op.execute('''
641 469 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
642 """
643 )
470 ''')
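
The partition name above is built from unpadded date_part() results. A Python mirror of that scheme (hypothetical helper, useful in maintenance scripts that prune old partitions):

    from datetime import datetime

    def metrics_partition_name(ts):
        # mirrors main_table || '_p_' || year || '_' || month || '_' || day;
        # date_part() yields unpadded numbers, so October is "10", March "3"
        return "metrics_p_{}_{}_{}".format(ts.year, ts.month, ts.day)

    assert metrics_partition_name(datetime(2018, 10, 13)) == "metrics_p_2018_10_13"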
644 471
645 op.execute(
646 """
472 op.execute('''
647 473 CREATE FUNCTION partition_reports_stats() RETURNS trigger
648 474 LANGUAGE plpgsql SECURITY DEFINER
649 475 AS $$
650 476 DECLARE
651 477 main_table varchar := 'reports_stats';
652 478 partitioned_table varchar := '';
653 479 BEGIN
654 480
655 481 partitioned_table := main_table || '_p_' || date_part('year', NEW.start_interval)::TEXT || '_' || DATE_part('month', NEW.start_interval);
656 482
657 483 BEGIN
658 484 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
659 485 EXCEPTION
660 486 WHEN undefined_table THEN
661 487 RAISE NOTICE 'A partition has been created %', partitioned_table;
662 488 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( start_interval >= DATE %s AND start_interval < DATE %s )) INHERITS (%s)',
663 489 partitioned_table,
664 490 quote_literal(date_trunc('month', NEW.start_interval)::date) ,
665 491 quote_literal((date_trunc('month', NEW.start_interval)::date + interval '1 month')::text),
666 492 main_table);
667 493 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table);
668 494 EXECUTE format('CREATE INDEX ix_%s_start_interval ON %s USING btree (start_interval);', partitioned_table, partitioned_table);
669 495 EXECUTE format('CREATE INDEX ix_%s_type ON %s USING btree (type);', partitioned_table, partitioned_table);
670 496 EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table);
671 497 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
672 498 END;
673 499 RETURN NULL;
674 500 END
675 501 $$;
676 """
677 )
502 ''')
678 503
679 op.execute(
680 """
504 op.execute('''
681 505 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
682 """
683 )
506 ''')
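
reports_stats switches from daily to monthly children keyed on start_interval. A retention sketch built on that naming (helper and arguments are hypothetical; formatting directly into the SQL is safe here because both values are integers):

    from appenlight.models import DBSession

    def drop_stats_partition(year, month):
        # children follow reports_stats_p_<year>_<month>
        DBSession.execute(
            "DROP TABLE IF EXISTS reports_stats_p_{}_{}".format(year, month))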
684 507
685 op.execute(
686 """
508 op.execute('''
687 509 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
688 510 LANGUAGE plpgsql SECURITY DEFINER
689 511 AS $$
690 512 DECLARE
691 513 main_table varchar := 'reports_groups';
692 514 partitioned_table varchar := '';
693 515 BEGIN
694 516
695 517 partitioned_table := main_table || '_p_' || date_part('year', NEW.first_timestamp)::TEXT || '_' || DATE_part('month', NEW.first_timestamp);
696 518
697 519 BEGIN
698 520 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
699 521 EXCEPTION
700 522 WHEN undefined_table THEN
701 523 RAISE NOTICE 'A partition has been created %', partitioned_table;
702 524 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( first_timestamp >= DATE %s AND first_timestamp < DATE %s )) INHERITS (%s)',
703 525 partitioned_table,
704 526 quote_literal(date_trunc('month', NEW.first_timestamp)::date) ,
705 527 quote_literal((date_trunc('month', NEW.first_timestamp)::date + interval '1 month')::text),
706 528 main_table);
707 529 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table);
708 530 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table);
709 531 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
710 532 END;
711 533 RETURN NULL;
712 534 END
713 535 $$;
714 """
715 )
536 ''')
716 537
717 op.execute(
718 """
538 op.execute('''
719 539 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
720 """
721 )
540 ''')
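
The CHECK constraint created above is what allows constraint exclusion: a time-bounded query against the parent only scans the months that can match. Sketch (assumes the default constraint_exclusion = partition setting):

    DBSession.execute(
        "SELECT count(*) FROM reports_groups "
        "WHERE first_timestamp >= '2018-10-01' "
        "  AND first_timestamp < '2018-11-01'")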
722 541
723 op.execute(
724 """
542 op.execute('''
725 543 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
726 544 LANGUAGE plpgsql SECURITY DEFINER
727 545 AS $$
728 546 DECLARE
729 547 main_table varchar := 'reports';
730 548 partitioned_table varchar := '';
731 549 partitioned_parent_table varchar := '';
732 550 BEGIN
733 551
734 552 partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time);
735 553 partitioned_parent_table := 'reports_groups_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time);
736 554
737 555 BEGIN
738 556 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
739 557 EXCEPTION
740 558 WHEN undefined_table THEN
741 559 RAISE NOTICE 'A partition has been created %', partitioned_table;
742 560 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)',
743 561 partitioned_table,
744 562 quote_literal(date_trunc('month', NEW.report_group_time)::date) ,
745 563 quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text),
746 564 main_table);
747 565 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table);
748 566 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table);
749 567 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_group_id FOREIGN KEY (group_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table);
750 568 EXECUTE format('CREATE INDEX ix_%s_report_group_time ON %s USING btree (report_group_time);', partitioned_table, partitioned_table);
751 569 EXECUTE format('CREATE INDEX ix_%s_group_id ON %s USING btree (group_id);', partitioned_table, partitioned_table);
752 570 EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table);
753 571 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
754 572 END;
755 573 RETURN NULL;
756 574 END
757 575 $$;
758 """
759 )
576 ''')
760 577
761 op.execute(
762 """
578 op.execute('''
763 579 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
764 """
765 )
580 ''')
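
Note the second foreign key above: each monthly reports child references the reports_groups child of the same month (partitioned_parent_table), so a group row must arrive before the first report of that month. The generated constraint name can be verified directly (month value illustrative):

    DBSession.execute(
        "SELECT conname FROM pg_constraint WHERE conname = :n",
        {"n": "fk_reports_p_2018_10_group_id"})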
581
766 582
767 op.execute(
768 """
583 op.execute('''
769 584 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
770 585 LANGUAGE plpgsql SECURITY DEFINER
771 586 AS $$
772 587 DECLARE
773 588 main_table varchar := 'slow_calls';
774 589 partitioned_table varchar := '';
775 590 partitioned_parent_table varchar := '';
776 591 BEGIN
777 592
778 593 partitioned_table := main_table || '_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time);
779 594 partitioned_parent_table := 'reports_p_' || date_part('year', NEW.report_group_time)::TEXT || '_' || DATE_part('month', NEW.report_group_time);
780 595
781 596 BEGIN
782 597 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
783 598 EXCEPTION
784 599 WHEN undefined_table THEN
785 600 RAISE NOTICE 'A partition has been created %', partitioned_table;
786 601 EXECUTE format('CREATE TABLE IF NOT EXISTS %s ( CHECK( report_group_time >= DATE %s AND report_group_time < DATE %s )) INHERITS (%s)',
787 602 partitioned_table,
788 603 quote_literal(date_trunc('month', NEW.report_group_time)::date) ,
789 604 quote_literal((date_trunc('month', NEW.report_group_time)::date + interval '1 month')::text),
790 605 main_table);
791 606 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT pk_%s PRIMARY KEY(id);', partitioned_table, partitioned_table);
792 607 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_resource_id FOREIGN KEY (resource_id) REFERENCES resources (resource_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table);
793 608 EXECUTE format('ALTER TABLE %s ADD CONSTRAINT fk_%s_report_id FOREIGN KEY (report_id) REFERENCES %s (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE CASCADE;', partitioned_table, partitioned_table, partitioned_parent_table);
794 609 EXECUTE format('CREATE INDEX ix_%s_resource_id ON %s USING btree (resource_id);', partitioned_table, partitioned_table);
795 610 EXECUTE format('CREATE INDEX ix_%s_report_id ON %s USING btree (report_id);', partitioned_table, partitioned_table);
796 611 EXECUTE format('CREATE INDEX ix_%s_timestamp ON %s USING btree (timestamp);', partitioned_table, partitioned_table);
797 612 EXECUTE 'INSERT INTO ' || partitioned_table || ' SELECT(' || TG_TABLE_NAME || ' ' || quote_literal(NEW) || ').*;';
798 613 END;
799 614 RETURN NULL;
800 615 END
801 616 $$;
802 """
803 )
617 ''')
804 618
805 op.execute(
806 """
619 op.execute('''
807 620 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
808 """
809 )
810
621 ''')
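
slow_calls repeats the pattern one level deeper: its monthly children point at the matching reports children via report_id, so deleting a report removes its slow calls through the ON DELETE CASCADE above (id value illustrative):

    DBSession.execute("DELETE FROM reports WHERE id = :id", {"id": 42})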
811 622
812 623 def downgrade():
813 624 pass
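
downgrade() is intentionally a no-op. A full reversal would have to drop every trigger and function created here; a hypothetical sketch:

    def downgrade():
        for fn, table in [("partition_logs", "logs"),
                          ("partition_metrics", "metrics"),
                          ("partition_reports_stats", "reports_stats"),
                          ("partition_reports_groups", "reports_groups"),
                          ("partition_reports", "reports"),
                          ("partition_slow_calls", "slow_calls")]:
            op.execute("DROP TRIGGER IF EXISTS {} ON {}".format(fn, table))
            op.execute("DROP FUNCTION IF EXISTS {}()".format(fn))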
@@ -1,40 +1,32 b''
1 1 """connect resources to alert_channels
2 2
3 3 Revision ID: e9fcfbdd9498
4 4 Revises: 55b6e612672f
5 5 Create Date: 2018-02-28 13:52:50.717217
6 6
7 7 """
8 8
9 9 # revision identifiers, used by Alembic.
10 revision = "e9fcfbdd9498"
11 down_revision = "55b6e612672f"
10 revision = 'e9fcfbdd9498'
11 down_revision = '55b6e612672f'
12 12
13 13 from alembic import op
14 14 import sqlalchemy as sa
15 15
16 16
17 17 def upgrade():
18 18 op.create_table(
19 "channels_resources",
20 sa.Column(
21 "channel_pkey",
22 sa.Integer,
23 sa.ForeignKey(
24 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
25 ),
26 primary_key=True,
27 ),
28 sa.Column(
29 "resource_id",
30 sa.Integer,
31 sa.ForeignKey(
32 "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE"
33 ),
34 primary_key=True,
35 ),
19 'channels_resources',
20 sa.Column('channel_pkey', sa.Integer,
21 sa.ForeignKey('alert_channels.pkey',
22 ondelete='CASCADE', onupdate='CASCADE'),
23 primary_key=True),
24 sa.Column('resource_id', sa.Integer,
25 sa.ForeignKey('resources.resource_id',
26 ondelete='CASCADE', onupdate='CASCADE'),
27 primary_key=True)
36 28 )
37 29
38 30
39 31 def downgrade():
40 op.drop_table("channels_resources")
32 op.drop_table('channels_resources')
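
With the association table in place, the channel_resources_m2m_table mapping in appenlight.models.alert_channel (further down in this diff) exposes it as AlertChannel.resources, so binding a channel to an application is a plain relationship append. Sketch, assuming both objects are already loaded:

    channel.resources.append(resource)   # writes a channels_resources row
    DBSession.flush()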
@@ -1,139 +1,130 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18
19 19 from sqlalchemy.ext.declarative import declarative_base
20 20 from sqlalchemy import MetaData
21 21 from sqlalchemy.orm import scoped_session
22 22 from sqlalchemy.orm import sessionmaker
23 23 from zope.sqlalchemy import ZopeTransactionExtension
24 24 import ziggurat_foundations
25 25 from ziggurat_foundations.models.base import get_db_session
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
30 30
31 31 NAMING_CONVENTION = {
32 "ix": "ix_%(column_0_label)s",
32 "ix": 'ix_%(column_0_label)s',
33 33 "uq": "uq_%(table_name)s_%(column_0_name)s",
34 34 "ck": "ck_%(table_name)s_%(constraint_name)s",
35 35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
36 "pk": "pk_%(table_name)s",
36 "pk": "pk_%(table_name)s"
37 37 }
38 38
39 39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
40 40 Base = declarative_base(metadata=metadata)
41 41
42 42 # optional for request.db approach
43 43 ziggurat_foundations.models.DBSession = DBSession
44 44
45 45
46 46 class Datastores(object):
47 47 redis = None
48 48 es = None
49 49
50 50
51 51 def register_datastores(es_conn, redis_conn, redis_lockmgr):
52 52 Datastores.es = es_conn
53 53 Datastores.redis = redis_conn
54 54 Datastores.lockmgr = redis_lockmgr
55 55
56 56
57 57 class SliceableESQuery(object):
58 58 def __init__(self, query, sort_query=None, aggregations=False, **kwconfig):
59 59 self.query = query
60 60 self.sort_query = sort_query
61 61 self.aggregations = aggregations
62 self.items_per_page = kwconfig.pop("items_per_page", 10)
63 self.page = kwconfig.pop("page", 1)
62 self.items_per_page = kwconfig.pop('items_per_page', 10)
63 self.page = kwconfig.pop('page', 1)
64 64 self.kwconfig = kwconfig
65 65 self.result = None
66 66
67 67 def __getitem__(self, index):
68 68 config = self.kwconfig.copy()
69 config["from_"] = index.start
69 config['es_from'] = index.start
70 70 query = self.query.copy()
71 71 if self.sort_query:
72 72 query.update(self.sort_query)
73 self.result = Datastores.es.search(
74 body=query, size=self.items_per_page, **config
75 )
73 self.result = Datastores.es.search(query, size=self.items_per_page,
74 **config)
76 75 if self.aggregations:
77 self.items = self.result.get("aggregations")
76 self.items = self.result.get('aggregations')
78 77 else:
79 self.items = self.result["hits"]["hits"]
78 self.items = self.result['hits']['hits']
80 79
81 80 return self.items
82 81
83 82 def __iter__(self):
84 83 return self.result
85 84
86 85 def __len__(self):
87 86 config = self.kwconfig.copy()
88 87 query = self.query.copy()
89 self.result = Datastores.es.search(
90 body=query, size=self.items_per_page, **config
91 )
88 self.result = Datastores.es.search(query, size=self.items_per_page,
89 **config)
92 90 if self.aggregations:
93 self.items = self.result.get("aggregations")
91 self.items = self.result.get('aggregations')
94 92 else:
95 self.items = self.result["hits"]["hits"]
93 self.items = self.result['hits']['hits']
96 94
97 count = int(self.result["hits"]["total"])
95 count = int(self.result['hits']['total'])
98 96 return count if count < 5000 else 5000
99 97
100 98
101 99 from appenlight.models.resource import Resource
102 100 from appenlight.models.application import Application
103 101 from appenlight.models.user import User
104 102 from appenlight.models.alert_channel import AlertChannel
105 103 from appenlight.models.alert_channel_action import AlertChannelAction
106 104 from appenlight.models.metric import Metric
107 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
105 from appenlight.models.application_postprocess_conf import \
106 ApplicationPostprocessConf
108 107 from appenlight.models.auth_token import AuthToken
109 108 from appenlight.models.event import Event
110 109 from appenlight.models.external_identity import ExternalIdentity
111 110 from appenlight.models.group import Group
112 111 from appenlight.models.group_permission import GroupPermission
113 112 from appenlight.models.group_resource_permission import GroupResourcePermission
114 113 from appenlight.models.log import Log
115 114 from appenlight.models.plugin_config import PluginConfig
116 115 from appenlight.models.report import Report
117 116 from appenlight.models.report_group import ReportGroup
118 117 from appenlight.models.report_comment import ReportComment
119 118 from appenlight.models.report_assignment import ReportAssignment
120 119 from appenlight.models.report_stat import ReportStat
121 120 from appenlight.models.slow_call import SlowCall
122 121 from appenlight.models.tag import Tag
123 122 from appenlight.models.user_group import UserGroup
124 123 from appenlight.models.user_permission import UserPermission
125 124 from appenlight.models.user_resource_permission import UserResourcePermission
126 125 from ziggurat_foundations import ziggurat_model_init
127 126
128 ziggurat_model_init(
129 User,
130 Group,
131 UserGroup,
132 GroupPermission,
133 UserPermission,
134 UserResourcePermission,
135 GroupResourcePermission,
136 Resource,
137 ExternalIdentity,
138 passwordmanager=None,
139 )
127 ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission,
128 UserResourcePermission, GroupResourcePermission,
129 Resource,
130 ExternalIdentity, passwordmanager=None)
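
SliceableESQuery adapts an Elasticsearch query body to the slice/len protocol that paginators expect; register_datastores() must have populated Datastores.es first. A usage sketch (query body illustrative):

    query = SliceableESQuery({"query": {"match_all": {}}},
                             items_per_page=10, page=1)
    first_page = query[0:10]   # runs the search for one page of hits
    total = len(query)         # re-runs the search; total is capped at 5000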
@@ -1,298 +1,305 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 import sqlalchemy as sa
19 19 import urllib.request, urllib.parse, urllib.error
20 20 from datetime import timedelta
21 21 from appenlight.models import Base
22 22 from appenlight.lib.utils.date_utils import convert_date
23 23 from sqlalchemy.dialects.postgresql import JSON
24 24 from ziggurat_foundations.models.base import BaseModel
25 25
26 26 log = logging.getLogger(__name__)
27 27
28 28 #
29 29 channel_rules_m2m_table = sa.Table(
30 "channels_actions",
31 Base.metadata,
32 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
33 sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
30 'channels_actions', Base.metadata,
31 sa.Column('channel_pkey', sa.Integer,
32 sa.ForeignKey('alert_channels.pkey')),
33 sa.Column('action_pkey', sa.Integer,
34 sa.ForeignKey('alert_channels_actions.pkey'))
34 35 )
35 36
36 37 channel_resources_m2m_table = sa.Table(
37 "channels_resources",
38 Base.metadata,
39 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
40 sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
38 'channels_resources', Base.metadata,
39 sa.Column('channel_pkey', sa.Integer,
40 sa.ForeignKey('alert_channels.pkey')),
41 sa.Column('resource_id', sa.Integer,
42 sa.ForeignKey('resources.resource_id'))
41 43 )
42 44
43 DATE_FRMT = "%Y-%m-%dT%H:%M"
45 DATE_FRMT = '%Y-%m-%dT%H:%M'
44 46
45 47
46 48 class AlertChannel(Base, BaseModel):
47 49 """
48 50 Stores information about possible alerting options
49 51 """
50
51 __tablename__ = "alert_channels"
52 __possible_channel_names__ = ["email"]
52 __tablename__ = 'alert_channels'
53 __possible_channel_names__ = ['email']
53 54 __mapper_args__ = {
54 "polymorphic_on": "channel_name",
55 "polymorphic_identity": "integration",
55 'polymorphic_on': 'channel_name',
56 'polymorphic_identity': 'integration'
56 57 }
57 58
58 owner_id = sa.Column(
59 sa.Unicode(30),
60 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
61 )
59 owner_id = sa.Column(sa.Unicode(30),
60 sa.ForeignKey('users.id', onupdate='CASCADE',
61 ondelete='CASCADE'))
62 62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
64 channel_json_conf = sa.Column(JSON(), nullable=False, default="")
65 channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
66 send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
67 daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
68 integration_id = sa.Column(
69 sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
70 )
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
64 channel_json_conf = sa.Column(JSON(), nullable=False, default='')
65 channel_validated = sa.Column(sa.Boolean, nullable=False,
66 default=False)
67 send_alerts = sa.Column(sa.Boolean, nullable=False,
68 default=True)
69 daily_digest = sa.Column(sa.Boolean, nullable=False,
70 default=True)
71 integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
72 nullable=True)
71 73 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
72 74
73 channel_actions = sa.orm.relationship(
74 "AlertChannelAction",
75 cascade="all",
76 passive_deletes=True,
77 passive_updates=True,
78 secondary=channel_rules_m2m_table,
79 backref="channels",
80 )
81 resources = sa.orm.relationship(
82 "Resource",
83 cascade="all",
84 passive_deletes=True,
85 passive_updates=True,
86 secondary=channel_resources_m2m_table,
87 backref="resources",
88 )
75 channel_actions = sa.orm.relationship('AlertChannelAction',
76 cascade="all",
77 passive_deletes=True,
78 passive_updates=True,
79 secondary=channel_rules_m2m_table,
80 backref='channels')
81 resources = sa.orm.relationship('Resource',
82 cascade="all",
83 passive_deletes=True,
84 passive_updates=True,
85 secondary=channel_resources_m2m_table,
86 backref='resources')
89 87
90 88 @property
91 89 def channel_visible_value(self):
92 90 if self.integration:
93 return "{}: {}".format(
94 self.channel_name, self.integration.resource.resource_name
91 return '{}: {}'.format(
92 self.channel_name,
93 self.integration.resource.resource_name
95 94 )
96 95
97 return "{}: {}".format(self.channel_name, self.channel_value)
96 return '{}: {}'.format(
97 self.channel_name,
98 self.channel_value
99 )
98 100
99 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True):
101 def get_dict(self, exclude_keys=None, include_keys=None,
102 extended_info=True):
100 103 """
101 104 Returns dictionary with required information that will be consumed by
102 105 angular
103 106 """
104 instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys)
107 instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
108 include_keys)
105 109 exclude_keys_list = exclude_keys or []
106 110 include_keys_list = include_keys or []
107 111
108 instance_dict["supports_report_alerting"] = True
109 instance_dict["channel_visible_value"] = self.channel_visible_value
112 instance_dict['supports_report_alerting'] = True
113 instance_dict['channel_visible_value'] = self.channel_visible_value
110 114
111 115 if extended_info:
112 instance_dict["actions"] = [
113 rule.get_dict(extended_info=True) for rule in self.channel_actions
114 ]
116 instance_dict['actions'] = [
117 rule.get_dict(extended_info=True) for
118 rule in self.channel_actions]
115 119
116 del instance_dict["channel_json_conf"]
120 del instance_dict['channel_json_conf']
117 121
118 122 if self.integration:
119 123 instance_dict[
120 "supports_report_alerting"
121 ] = self.integration.supports_report_alerting
124 'supports_report_alerting'] = \
125 self.integration.supports_report_alerting
122 126 d = {}
123 127 for k in instance_dict.keys():
124 if k not in exclude_keys_list and (
125 k in include_keys_list or not include_keys
126 ):
128 if (k not in exclude_keys_list and
129 (k in include_keys_list or not include_keys)):
127 130 d[k] = instance_dict[k]
128 131 return d
129 132
130 133 def __repr__(self):
131 return "<AlertChannel: (%s,%s), user:%s>" % (
132 self.channel_name,
133 self.channel_value,
134 self.user_name,
135 )
134 return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
135 self.channel_value,
136 self.user_name,)
136 137
137 138 def send_digest(self, **kwargs):
138 139 """
139 140 This should implement daily top error report notifications
140 141 """
141 log.warning("send_digest NOT IMPLEMENTED")
142 log.warning('send_digest NOT IMPLEMENTED')
142 143
143 144 def notify_reports(self, **kwargs):
144 145 """
145 146 This should implement notification of reports that occurred in a 1 min
146 147 interval
147 148 """
148 log.warning("notify_reports NOT IMPLEMENTED")
149 log.warning('notify_reports NOT IMPLEMENTED')
149 150
150 151 def notify_alert(self, **kwargs):
151 152 """
152 153 Notify user of report/uptime/chart threshold events based on the event's alert
153 154 type
154 155
155 156 Kwargs:
156 157 application: application that the event applies for,
157 158 event: event that is notified,
158 159 user: user that should be notified
159 160 request: request object
160 161
161 162 """
162 alert_name = kwargs["event"].unified_alert_name()
163 if alert_name in ["slow_report_alert", "error_report_alert"]:
163 alert_name = kwargs['event'].unified_alert_name()
164 if alert_name in ['slow_report_alert', 'error_report_alert']:
164 165 self.notify_report_alert(**kwargs)
165 elif alert_name == "uptime_alert":
166 elif alert_name == 'uptime_alert':
166 167 self.notify_uptime_alert(**kwargs)
167 elif alert_name == "chart_alert":
168 elif alert_name == 'chart_alert':
168 169 self.notify_chart_alert(**kwargs)
169 170
170 171 def notify_chart_alert(self, **kwargs):
171 172 """
172 173 This should implement report open/close alerts notifications
173 174 """
174 log.warning("notify_chart_alert NOT IMPLEMENTED")
175 log.warning('notify_chart_alert NOT IMPLEMENTED')
175 176
176 177 def notify_report_alert(self, **kwargs):
177 178 """
178 179 This should implement report open/close alerts notifications
179 180 """
180 log.warning("notify_report_alert NOT IMPLEMENTED")
181 log.warning('notify_report_alert NOT IMPLEMENTED')
181 182
182 183 def notify_uptime_alert(self, **kwargs):
183 184 """
184 185 This should implement uptime open/close alerts notifications
185 186 """
186 log.warning("notify_uptime_alert NOT IMPLEMENTED")
187 log.warning('notify_uptime_alert NOT IMPLEMENTED')
187 188
188 189 def get_notification_basic_vars(self, kwargs):
189 190 """
190 191 Sets most common variables used later for rendering notifications for
191 192 channel
192 193 """
193 if "event" in kwargs:
194 kwargs["since_when"] = kwargs["event"].start_date
194 if 'event' in kwargs:
195 kwargs['since_when'] = kwargs['event'].start_date
195 196
196 url_start_date = kwargs.get("since_when") - timedelta(minutes=1)
197 url_end_date = kwargs.get("since_when") + timedelta(minutes=4)
197 url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
198 url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
198 199 tmpl_vars = {
199 "timestamp": kwargs["since_when"],
200 "user": kwargs["user"],
201 "since_when": kwargs.get("since_when"),
200 "timestamp": kwargs['since_when'],
201 "user": kwargs['user'],
202 "since_when": kwargs.get('since_when'),
202 203 "url_start_date": url_start_date,
203 "url_end_date": url_end_date,
204 "url_end_date": url_end_date
204 205 }
205 tmpl_vars["resource_name"] = kwargs["resource"].resource_name
206 tmpl_vars["resource"] = kwargs["resource"]
206 tmpl_vars["resource_name"] = kwargs['resource'].resource_name
207 tmpl_vars["resource"] = kwargs['resource']
207 208
208 if "event" in kwargs:
209 tmpl_vars["event_values"] = kwargs["event"].values
210 tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name()
211 tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action()
209 if 'event' in kwargs:
210 tmpl_vars['event_values'] = kwargs['event'].values
211 tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
212 tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
212 213 return tmpl_vars
213 214
214 215 def report_alert_notification_vars(self, kwargs):
215 216 tmpl_vars = self.get_notification_basic_vars(kwargs)
216 reports = kwargs.get("reports", [])
217 reports = kwargs.get('reports', [])
217 218 tmpl_vars["reports"] = reports
218 219 tmpl_vars["confirmed_total"] = len(reports)
219 220
220 221 tmpl_vars["report_type"] = "error reports"
221 tmpl_vars["url_report_type"] = "report/list"
222 tmpl_vars["url_report_type"] = 'report/list'
222 223
223 alert_type = tmpl_vars.get("alert_type", "")
224 if "slow_report" in alert_type:
224 alert_type = tmpl_vars.get('alert_type', '')
225 if 'slow_report' in alert_type:
225 226 tmpl_vars["report_type"] = "slow reports"
226 tmpl_vars["url_report_type"] = "report/list_slow"
227 tmpl_vars["url_report_type"] = 'report/list_slow'
227 228
228 app_url = kwargs["request"].registry.settings["_mail_url"]
229 app_url = kwargs['request'].registry.settings['_mail_url']
229 230
230 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
231 destination_url = kwargs['request'].route_url('/',
232 _app_url=app_url)
231 233 if alert_type:
232 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(
234 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
233 235 tmpl_vars["url_report_type"],
234 tmpl_vars["resource"].resource_id,
235 tmpl_vars["url_start_date"].strftime(DATE_FRMT),
236 tmpl_vars["url_end_date"].strftime(DATE_FRMT),
236 tmpl_vars['resource'].resource_id,
237 tmpl_vars['url_start_date'].strftime(DATE_FRMT),
238 tmpl_vars['url_end_date'].strftime(DATE_FRMT)
237 239 )
238 240 else:
239 destination_url += "ui/{}?resource={}".format(
240 tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id
241 destination_url += 'ui/{}?resource={}'.format(
242 tmpl_vars["url_report_type"],
243 tmpl_vars['resource'].resource_id
241 244 )
242 245 tmpl_vars["destination_url"] = destination_url
243 246
244 247 return tmpl_vars
245 248
246 249 def uptime_alert_notification_vars(self, kwargs):
247 250 tmpl_vars = self.get_notification_basic_vars(kwargs)
248 app_url = kwargs["request"].registry.settings["_mail_url"]
249 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
250 destination_url += "ui/{}?resource={}".format(
251 "uptime", tmpl_vars["resource"].resource_id
252 )
253 tmpl_vars["destination_url"] = destination_url
254
255 reason = ""
256 e_values = tmpl_vars.get("event_values")
257
258 if e_values and e_values.get("response_time") == 0:
259 reason += " Response time was slower than 20 seconds."
251 app_url = kwargs['request'].registry.settings['_mail_url']
252 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
253 destination_url += 'ui/{}?resource={}'.format(
254 'uptime',
255 tmpl_vars['resource'].resource_id)
256 tmpl_vars['destination_url'] = destination_url
257
258 reason = ''
259 e_values = tmpl_vars.get('event_values')
260
261 if e_values and e_values.get('response_time') == 0:
262 reason += ' Response time was slower than 20 seconds.'
260 263 elif e_values:
261 code = e_values.get("status_code")
262 reason += " Response status code: %s." % code
264 code = e_values.get('status_code')
265 reason += ' Response status code: %s.' % code
263 266
264 tmpl_vars["reason"] = reason
267 tmpl_vars['reason'] = reason
265 268 return tmpl_vars
266 269
267 270 def chart_alert_notification_vars(self, kwargs):
268 271 tmpl_vars = self.get_notification_basic_vars(kwargs)
269 tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"]
270 tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or ""
271 matched_values = tmpl_vars["event_values"]["matched_step_values"]
272 tmpl_vars["readable_values"] = []
273 for key, value in list(matched_values["values"].items()):
274 matched_label = matched_values["labels"].get(key)
272 tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
273 tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
274 'action_name') or ''
275 matched_values = tmpl_vars['event_values']['matched_step_values']
276 tmpl_vars['readable_values'] = []
277 for key, value in list(matched_values['values'].items()):
278 matched_label = matched_values['labels'].get(key)
275 279 if matched_label:
276 tmpl_vars["readable_values"].append(
277 {"label": matched_label["human_label"], "value": value}
278 )
279 tmpl_vars["readable_values"] = sorted(
280 tmpl_vars["readable_values"], key=lambda x: x["label"]
281 )
282 start_date = convert_date(tmpl_vars["event_values"]["start_interval"])
280 tmpl_vars['readable_values'].append({
281 'label': matched_label['human_label'],
282 'value': value
283 })
284 tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
285 key=lambda x: x['label'])
286 start_date = convert_date(tmpl_vars['event_values']['start_interval'])
283 287 end_date = None
284 if tmpl_vars["event_values"].get("end_interval"):
285 end_date = convert_date(tmpl_vars["event_values"]["end_interval"])
288 if tmpl_vars['event_values'].get('end_interval'):
289 end_date = convert_date(tmpl_vars['event_values']['end_interval'])
286 290
287 app_url = kwargs["request"].registry.settings["_mail_url"]
288 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
291 app_url = kwargs['request'].registry.settings['_mail_url']
292 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
289 293 to_encode = {
290 "resource": tmpl_vars["event_values"]["resource"],
291 "start_date": start_date.strftime(DATE_FRMT),
294 'resource': tmpl_vars['event_values']['resource'],
295 'start_date': start_date.strftime(DATE_FRMT),
292 296 }
293 297 if end_date:
294 to_encode["end_date"] = end_date.strftime(DATE_FRMT)
298 to_encode['end_date'] = end_date.strftime(DATE_FRMT)
295 299
296 destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
297 tmpl_vars["destination_url"] = destination_url
300 destination_url += 'ui/{}?{}'.format(
301 'logs',
302 urllib.parse.urlencode(to_encode)
303 )
304 tmpl_vars['destination_url'] = destination_url
298 305 return tmpl_vars
@@ -1,84 +1,79 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import sqlalchemy as sa
18 18
19 from ziggurat_foundations.models.services.resource import ResourceService
19 from appenlight.models.resource import Resource
20 20 from appenlight.models import Base, get_db_session
21 21 from sqlalchemy.orm import validates
22 22 from ziggurat_foundations.models.base import BaseModel
23 23
24 24
25 25 class AlertChannelAction(Base, BaseModel):
26 26 """
27 27 Stores notification conditions for a user's alert channels
28 28 This is later used for rule parsing like "alert if http_status == 500"
29 29 """
30 __tablename__ = 'alert_channels_actions'
30 31
31 __tablename__ = "alert_channels_actions"
32 types = ['report', 'chart']
32 33
33 types = ["report", "chart"]
34
35 owner_id = sa.Column(
36 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
37 )
34 owner_id = sa.Column(sa.Integer,
35 sa.ForeignKey('users.id', onupdate='CASCADE',
36 ondelete='CASCADE'))
38 37 resource_id = sa.Column(sa.Integer())
39 action = sa.Column(sa.Unicode(10), nullable=False, default="always")
38 action = sa.Column(sa.Unicode(10), nullable=False, default='always')
40 39 type = sa.Column(sa.Unicode(10), nullable=False)
41 40 other_id = sa.Column(sa.Unicode(40))
42 41 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
43 rule = sa.Column(
44 sa.dialects.postgresql.JSON,
45 nullable=False,
46 default={"field": "http_status", "op": "ge", "value": "500"},
47 )
42 rule = sa.Column(sa.dialects.postgresql.JSON,
43 nullable=False, default={'field': 'http_status',
44 "op": "ge", "value": "500"})
48 45 config = sa.Column(sa.dialects.postgresql.JSON)
49 46 name = sa.Column(sa.Unicode(255))
50 47
51 @validates("notify_type")
48 @validates('notify_type')
52 49 def validate_email(self, key, notify_type):
53 assert notify_type in ["always", "only_first"]
50 assert notify_type in ['always', 'only_first']
54 51 return notify_type
55 52
56 53 def resource_name(self, db_session=None):
57 54 db_session = get_db_session(db_session)
58 55 if self.resource_id:
59 return ResourceService.by_resource_id(
60 self.resource_id, db_session=db_session
61 ).resource_name
56 return Resource.by_resource_id(self.resource_id,
57 db_session=db_session).resource_name
62 58 else:
63 return "any resource"
59 return 'any resource'
64 60
65 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
61 def get_dict(self, exclude_keys=None, include_keys=None,
62 extended_info=False):
66 63 """
67 64 Returns dictionary with required information that will be consumed by
68 65 angular
69 66 """
70 67 instance_dict = super(AlertChannelAction, self).get_dict()
71 68 exclude_keys_list = exclude_keys or []
72 69 include_keys_list = include_keys or []
73 70 if extended_info:
74 instance_dict["channels"] = [
75 c.get_dict(extended_info=False) for c in self.channels
76 ]
71 instance_dict['channels'] = [
72 c.get_dict(extended_info=False) for c in self.channels]
77 73
78 74 d = {}
79 75 for k in instance_dict.keys():
80 if k not in exclude_keys_list and (
81 k in include_keys_list or not include_keys
82 ):
76 if (k not in exclude_keys_list and
77 (k in include_keys_list or not include_keys)):
83 78 d[k] = instance_dict[k]
84 79 return d
@@ -1,15 +1,16 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16
@@ -1,191 +1,188 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 from appenlight.models.alert_channel import AlertChannel
19 19 from appenlight.models.integrations.campfire import CampfireIntegration
20 20 from webhelpers2.text import truncate
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 25 class CampfireAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "campfire"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'campfire'
28 }
27 29
28 30 @property
29 31 def client(self):
30 32 client = CampfireIntegration.create_client(
31 self.integration.config["api_token"], self.integration.config["account"]
32 )
33 self.integration.config['api_token'],
34 self.integration.config['account'])
33 35 return client
34 36
35 37 def notify_reports(self, **kwargs):
36 38 """
37 39 Notify user of individual reports
38 40
39 41 kwargs:
40 42 application: application that the event applies for,
41 43 user: user that should be notified
42 44 request: request object
43 45 since_when: reports are newer than this time value,
44 46 reports: list of reports to render
45 47
46 48 """
47 49 template_vars = self.report_alert_notification_vars(kwargs)
48 50
49 app_url = kwargs["request"].registry.settings["_mail_url"]
50 destination_url = kwargs["request"].route_url("/", app_url=app_url)
51 f_args = (
52 "report",
53 template_vars["resource"].resource_id,
54 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
55 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
56 )
57 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
58
59 if template_vars["confirmed_total"] > 1:
51 app_url = kwargs['request'].registry.settings['_mail_url']
52 destination_url = kwargs['request'].route_url('/',
53 app_url=app_url)
54 f_args = ('report',
55 template_vars['resource'].resource_id,
56 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
57 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
58 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
59 *f_args)
60
61 if template_vars['confirmed_total'] > 1:
60 62 template_vars["title"] = "%s - %s reports" % (
61 template_vars["resource_name"],
62 template_vars["confirmed_total"],
63 template_vars['resource_name'],
64 template_vars['confirmed_total'],
63 65 )
64 66 else:
65 error_title = truncate(
66 template_vars["reports"][0][1].error or "slow report", 90
67 )
67 error_title = truncate(template_vars['reports'][0][1].error or
68 'slow report', 90)
68 69 template_vars["title"] = "%s - '%s' report" % (
69 template_vars["resource_name"],
70 error_title,
71 )
70 template_vars['resource_name'],
71 error_title)
72 72
73 template_vars["title"] += " " + destination_url
73 template_vars["title"] += ' ' + destination_url
74 74
75 log_msg = "NOTIFY : %s via %s :: %s reports" % (
76 kwargs["user"].user_name,
75 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
76 kwargs['user'].user_name,
77 77 self.channel_visible_value,
78 template_vars["confirmed_total"],
79 )
78 template_vars['confirmed_total'])
80 79 log.warning(log_msg)
81 80
82 for room in self.integration.config["rooms"].split(","):
81 for room in self.integration.config['rooms'].split(','):
83 82 self.client.speak_to_room(room.strip(), template_vars["title"])
84 83
85 84 def notify_report_alert(self, **kwargs):
86 85 """
87 86 Build and send report alert notification
88 87
89 88 Kwargs:
90 89 application: application that the event applies for,
91 90 event: event that is notified,
92 91 user: user that should be notified
93 92 request: request object
94 93
95 94 """
96 95 template_vars = self.report_alert_notification_vars(kwargs)
97 96
98 if kwargs["event"].unified_alert_action() == "OPEN":
99 title = "ALERT %s: %s - %s %s %s" % (
100 template_vars["alert_action"],
101 template_vars["resource_name"],
102 kwargs["event"].values["reports"],
103 template_vars["report_type"],
104 template_vars["destination_url"],
97 if kwargs['event'].unified_alert_action() == 'OPEN':
98 title = 'ALERT %s: %s - %s %s %s' % (
99 template_vars['alert_action'],
100 template_vars['resource_name'],
101 kwargs['event'].values['reports'],
102 template_vars['report_type'],
103 template_vars['destination_url']
105 104 )
106 105
107 106 else:
108 title = "ALERT %s: %s type: %s" % (
109 template_vars["alert_action"],
110 template_vars["resource_name"],
111 template_vars["alert_type"].replace("_", " "),
107 title = 'ALERT %s: %s type: %s' % (
108 template_vars['alert_action'],
109 template_vars['resource_name'],
110 template_vars['alert_type'].replace('_', ' '),
112 111 )
113 for room in self.integration.config["rooms"].split(","):
114 self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")
112 for room in self.integration.config['rooms'].split(','):
113 self.client.speak_to_room(room.strip(), title, sound='VUVUZELA')
115 114
116 115 def notify_uptime_alert(self, **kwargs):
117 116 """
118 117 Build and send uptime alert notification
119 118
120 119 Kwargs:
121 120 application: application that the event applies for,
122 121 event: event that is notified,
123 122 user: user that should be notified
124 123 request: request object
125 124
126 125 """
127 126 template_vars = self.uptime_alert_notification_vars(kwargs)
128 127
129 message = "ALERT %s: %s has uptime issues %s\n\n" % (
130 template_vars["alert_action"],
131 template_vars["resource_name"],
132 template_vars["destination_url"],
128 message = 'ALERT %s: %s has uptime issues %s\n\n' % (
129 template_vars['alert_action'],
130 template_vars['resource_name'],
131 template_vars['destination_url']
133 132 )
134 message += template_vars["reason"]
133 message += template_vars['reason']
135 134
136 for room in self.integration.config["rooms"].split(","):
137 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
135 for room in self.integration.config['rooms'].split(','):
136 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
138 137
139 138 def send_digest(self, **kwargs):
140 139 """
141 140 Build and send daily digest notification
142 141
143 142 kwargs:
144 143 application: application that the event applies for,
145 144 user: user that should be notified
146 145 request: request object
147 146 since_when: reports are newer than this time value,
148 147 reports: list of reports to render
149 148
150 149 """
151 150 template_vars = self.report_alert_notification_vars(kwargs)
152 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
151 f_args = (template_vars['resource_name'],
152 template_vars['confirmed_total'],)
153 153 message = "Daily report digest: %s - %s reports" % f_args
154 message += "{}\n".format(template_vars["destination_url"])
155 for room in self.integration.config["rooms"].split(","):
154 message += '{}\n'.format(template_vars['destination_url'])
155 for room in self.integration.config['rooms'].split(','):
156 156 self.client.speak_to_room(room.strip(), message)
157 157
158 log_msg = "DIGEST : %s via %s :: %s reports" % (
159 kwargs["user"].user_name,
158 log_msg = 'DIGEST : %s via %s :: %s reports' % (
159 kwargs['user'].user_name,
160 160 self.channel_visible_value,
161 template_vars["confirmed_total"],
162 )
161 template_vars['confirmed_total'])
163 162 log.warning(log_msg)
164 163
165 164 def notify_chart_alert(self, **kwargs):
166 165 """
167 166 Build and send chart alert notification
168 167
169 168 Kwargs:
170 169 application: application that the event applies for,
171 170 event: event that is notified,
172 171 user: user that should be notified
173 172 request: request object
174 173
175 174 """
176 175 template_vars = self.chart_alert_notification_vars(kwargs)
177 message = (
178 'ALERT {}: value in "{}" chart: '
179 'met alert "{}" criteria {} \n'.format(
180 template_vars["alert_action"],
181 template_vars["chart_name"],
182 template_vars["action_name"],
183 template_vars["destination_url"],
184 )
176 message = 'ALERT {}: value in "{}" chart: ' \
177 'met alert "{}" criteria {} \n'.format(
178 template_vars['alert_action'],
179 template_vars['chart_name'],
180 template_vars['action_name'],
181 template_vars['destination_url']
185 182 )
186 183
187 for item in template_vars["readable_values"]:
188 message += "{}: {}\n".format(item["label"], item["value"])
184 for item in template_vars['readable_values']:
185 message += '{}: {}\n'.format(item['label'], item['value'])
189 186
190 for room in self.integration.config["rooms"].split(","):
191 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
187 for room in self.integration.config['rooms'].split(','):
188 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
@@ -1,192 +1,175 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 from appenlight.models.alert_channel import AlertChannel
19 19 from appenlight.models.services.user import UserService
20 20 from webhelpers2.text import truncate
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 25 class EmailAlertChannel(AlertChannel):
26 26 """
27 27 Default email alerting channel
28 28 """
29 29
30 __mapper_args__ = {"polymorphic_identity": "email"}
30 __mapper_args__ = {
31 'polymorphic_identity': 'email'
32 }
31 33
32 34 def notify_reports(self, **kwargs):
33 35 """
34 36 Notify user of individual reports
35 37
36 38 kwargs:
37 39 application: application that the event applies for,
38 40 user: user that should be notified
39 41 request: request object
40 42 since_when: reports are newer than this time value,
41 43 reports: list of reports to render
42 44
43 45 """
44 46 template_vars = self.report_alert_notification_vars(kwargs)
45 47
46 if template_vars["confirmed_total"] > 1:
48 if template_vars['confirmed_total'] > 1:
47 49 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
48 template_vars["resource_name"],
49 template_vars["confirmed_total"],
50 template_vars['resource_name'],
51 template_vars['confirmed_total'],
50 52 )
51 53 else:
52 error_title = truncate(
53 template_vars["reports"][0][1].error or "slow report", 20
54 )
54 error_title = truncate(template_vars['reports'][0][1].error or
55 'slow report', 20)
55 56 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
56 template_vars["resource_name"],
57 error_title,
58 )
59 UserService.send_email(
60 kwargs["request"],
61 [self.channel_value],
62 template_vars,
63 "/email_templates/notify_reports.jinja2",
64 )
65 log_msg = "NOTIFY : %s via %s :: %s reports" % (
66 kwargs["user"].user_name,
57 template_vars['resource_name'],
58 error_title)
59 UserService.send_email(kwargs['request'],
60 [self.channel_value],
61 template_vars,
62 '/email_templates/notify_reports.jinja2')
63 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
64 kwargs['user'].user_name,
67 65 self.channel_visible_value,
68 template_vars["confirmed_total"],
69 )
66 template_vars['confirmed_total'])
70 67 log.warning(log_msg)
71 68
72 69 def send_digest(self, **kwargs):
73 70 """
74 71 Build and send daily digest notification
75 72
76 73 kwargs:
77 74 application: application that the event applies for,
78 75 user: user that should be notified
79 76 request: request object
80 77 since_when: reports are newer than this time value,
81 78 reports: list of reports to render
82 79
83 80 """
84 81 template_vars = self.report_alert_notification_vars(kwargs)
85 82 title = "AppEnlight :: Daily report digest: %s - %s reports"
86 83 template_vars["email_title"] = title % (
87 template_vars["resource_name"],
88 template_vars["confirmed_total"],
84 template_vars['resource_name'],
85 template_vars['confirmed_total'],
89 86 )
90 87
91 UserService.send_email(
92 kwargs["request"],
93 [self.channel_value],
94 template_vars,
95 "/email_templates/notify_reports.jinja2",
96 immediately=True,
97 silent=True,
98 )
99 log_msg = "DIGEST : %s via %s :: %s reports" % (
100 kwargs["user"].user_name,
88 UserService.send_email(kwargs['request'],
89 [self.channel_value],
90 template_vars,
91 '/email_templates/notify_reports.jinja2',
92 immediately=True,
93 silent=True)
94 log_msg = 'DIGEST : %s via %s :: %s reports' % (
95 kwargs['user'].user_name,
101 96 self.channel_visible_value,
102 template_vars["confirmed_total"],
103 )
97 template_vars['confirmed_total'])
104 98 log.warning(log_msg)
105 99
106 100 def notify_report_alert(self, **kwargs):
107 101 """
108 102 Build and send report alert notification
109 103
110 104 Kwargs:
111 105 application: application that the event applies for,
112 106 event: event that is notified,
113 107 user: user that should be notified
114 108 request: request object
115 109
116 110 """
117 111 template_vars = self.report_alert_notification_vars(kwargs)
118 112
119 if kwargs["event"].unified_alert_action() == "OPEN":
120 title = "AppEnlight :: ALERT %s: %s - %s %s" % (
121 template_vars["alert_action"],
122 template_vars["resource_name"],
123 kwargs["event"].values["reports"],
124 template_vars["report_type"],
113 if kwargs['event'].unified_alert_action() == 'OPEN':
114 title = 'AppEnlight :: ALERT %s: %s - %s %s' % (
115 template_vars['alert_action'],
116 template_vars['resource_name'],
117 kwargs['event'].values['reports'],
118 template_vars['report_type'],
125 119 )
126 120 else:
127 title = "AppEnlight :: ALERT %s: %s type: %s" % (
128 template_vars["alert_action"],
129 template_vars["resource_name"],
130 template_vars["alert_type"].replace("_", " "),
121 title = 'AppEnlight :: ALERT %s: %s type: %s' % (
122 template_vars['alert_action'],
123 template_vars['resource_name'],
124 template_vars['alert_type'].replace('_', ' '),
131 125 )
132 template_vars["email_title"] = title
133 UserService.send_email(
134 kwargs["request"],
135 [self.channel_value],
136 template_vars,
137 "/email_templates/alert_reports.jinja2",
138 )
126 template_vars['email_title'] = title
127 UserService.send_email(kwargs['request'], [self.channel_value],
128 template_vars,
129 '/email_templates/alert_reports.jinja2')
139 130
140 131 def notify_uptime_alert(self, **kwargs):
141 132 """
142 133 Build and send uptime alert notification
143 134
144 135 Kwargs:
145 136 application: application that the event applies for,
146 137 event: event that is notified,
147 138 user: user that should be notified
148 139 request: request object
149 140
150 141 """
151 142 template_vars = self.uptime_alert_notification_vars(kwargs)
152 title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
153 template_vars["alert_action"],
154 template_vars["resource_name"],
143 title = 'AppEnlight :: ALERT %s: %s has uptime issues' % (
144 template_vars['alert_action'],
145 template_vars['resource_name'],
155 146 )
156 template_vars["email_title"] = title
147 template_vars['email_title'] = title
157 148
158 UserService.send_email(
159 kwargs["request"],
160 [self.channel_value],
161 template_vars,
162 "/email_templates/alert_uptime.jinja2",
163 )
149 UserService.send_email(kwargs['request'], [self.channel_value],
150 template_vars,
151 '/email_templates/alert_uptime.jinja2')
164 152
165 153 def notify_chart_alert(self, **kwargs):
166 154 """
167 155 Build and send chart alert notification
168 156
169 157 Kwargs:
170 158 application: application that the event applies for,
171 159 event: event that is notified,
172 160 user: user that should be notified
173 161 request: request object
174 162
175 163 """
176 164 template_vars = self.chart_alert_notification_vars(kwargs)
177 165
178 title = (
179 'AppEnlight :: ALERT {} value in "{}" chart'
180 ' met alert "{}" criteria'.format(
181 template_vars["alert_action"],
182 template_vars["chart_name"],
183 template_vars["action_name"],
184 )
185 )
186 template_vars["email_title"] = title
187 UserService.send_email(
188 kwargs["request"],
189 [self.channel_value],
190 template_vars,
191 "/email_templates/alert_chart.jinja2",
166 title = 'AppEnlight :: ALERT {} value in "{}" chart' \
167 ' met alert "{}" criteria'.format(
168 template_vars['alert_action'],
169 template_vars['chart_name'],
170 template_vars['action_name'],
192 171 )
172 template_vars['email_title'] = title
173 UserService.send_email(kwargs['request'], [self.channel_value],
174 template_vars,
175 '/email_templates/alert_chart.jinja2')
@@ -1,225 +1,233 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 from appenlight.models.alert_channel import AlertChannel
19 19 from appenlight.models.integrations.flowdock import FlowdockIntegration
20 20 from webhelpers2.text import truncate
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 25 class FlowdockAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "flowdock"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'flowdock'
28 }
27 29
28 30 def notify_reports(self, **kwargs):
29 31 """
30 32 Notify user of individual reports
31 33
32 34 kwargs:
33 35 application: application that the event applies for,
34 36 user: user that should be notified
35 37 request: request object
36 38 since_when: reports are newer than this time value,
37 39 reports: list of reports to render
38 40
39 41 """
40 42 template_vars = self.report_alert_notification_vars(kwargs)
41 43
42 app_url = kwargs["request"].registry.settings["_mail_url"]
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
44 f_args = (
45 "report",
46 template_vars["resource"].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
49 )
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
51
52 if template_vars["confirmed_total"] > 1:
44 app_url = kwargs['request'].registry.settings['_mail_url']
45 destination_url = kwargs['request'].route_url('/',
46 _app_url=app_url)
47 f_args = ('report',
48 template_vars['resource'].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
52 *f_args)
53
54 if template_vars['confirmed_total'] > 1:
53 55 template_vars["title"] = "%s - %s reports" % (
54 template_vars["resource_name"],
55 template_vars["confirmed_total"],
56 template_vars['resource_name'],
57 template_vars['confirmed_total'],
56 58 )
57 59 else:
58 error_title = truncate(
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
60 error_title = truncate(template_vars['reports'][0][1].error or
61 'slow report', 90)
61 62 template_vars["title"] = "%s - '%s' report" % (
62 template_vars["resource_name"],
63 error_title,
64 )
63 template_vars['resource_name'],
64 error_title)
65 65
66 log_msg = "NOTIFY : %s via %s :: %s reports" % (
67 kwargs["user"].user_name,
66 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
67 kwargs['user'].user_name,
68 68 self.channel_visible_value,
69 template_vars["confirmed_total"],
70 )
69 template_vars['confirmed_total'])
71 70 log.warning(log_msg)
72 71
73 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
72 client = FlowdockIntegration.create_client(
73 self.integration.config['api_token'])
74 74 payload = {
75 75 "source": "AppEnlight",
76 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
76 "from_address": kwargs['request'].registry.settings[
77 'mailing.from_email'],
77 78 "subject": template_vars["title"],
78 79 "content": "New report present",
79 80 "tags": ["appenlight"],
80 "link": destination_url,
81 "link": destination_url
81 82 }
82 83 client.send_to_inbox(payload)
83 84
84 85 def notify_report_alert(self, **kwargs):
85 86 """
86 87 Build and send report alert notification
87 88
88 89 Kwargs:
89 90 application: application that the event applies for,
90 91 event: event that is notified,
91 92 user: user that should be notified
92 93 request: request object
93 94
94 95 """
95 96 template_vars = self.report_alert_notification_vars(kwargs)
96 97
97 if kwargs["event"].unified_alert_action() == "OPEN":
98 if kwargs['event'].unified_alert_action() == 'OPEN':
98 99
99 title = "ALERT %s: %s - %s %s" % (
100 template_vars["alert_action"],
101 template_vars["resource_name"],
102 kwargs["event"].values["reports"],
103 template_vars["report_type"],
100 title = 'ALERT %s: %s - %s %s' % (
101 template_vars['alert_action'],
102 template_vars['resource_name'],
103 kwargs['event'].values['reports'],
104 template_vars['report_type'],
104 105 )
105 106
106 107 else:
107 title = "ALERT %s: %s type: %s" % (
108 template_vars["alert_action"],
109 template_vars["resource_name"],
110 template_vars["alert_type"].replace("_", " "),
108 title = 'ALERT %s: %s type: %s' % (
109 template_vars['alert_action'],
110 template_vars['resource_name'],
111 template_vars['alert_type'].replace('_', ' '),
111 112 )
112 113
113 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
114 client = FlowdockIntegration.create_client(
115 self.integration.config['api_token'])
114 116 payload = {
115 117 "source": "AppEnlight",
116 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
118 "from_address": kwargs['request'].registry.settings[
119 'mailing.from_email'],
117 120 "subject": title,
118 "content": "Investigation required",
119 "tags": ["appenlight", "alert", template_vars["alert_type"]],
120 "link": template_vars["destination_url"],
121 "content": 'Investigation required',
122 "tags": ["appenlight", "alert", template_vars['alert_type']],
123 "link": template_vars['destination_url']
121 124 }
122 125 client.send_to_inbox(payload)
123 126
124 127 def notify_uptime_alert(self, **kwargs):
125 128 """
126 129 Build and send uptime alert notification
127 130
128 131 Kwargs:
129 132 application: application that the event applies for,
130 133 event: event that is notified,
131 134 user: user that should be notified
132 135 request: request object
133 136
134 137 """
135 138 template_vars = self.uptime_alert_notification_vars(kwargs)
136 139
137 message = "ALERT %s: %s has uptime issues" % (
138 template_vars["alert_action"],
139 template_vars["resource_name"],
140 message = 'ALERT %s: %s has uptime issues' % (
141 template_vars['alert_action'],
142 template_vars['resource_name'],
140 143 )
141 submessage = "Info: "
142 submessage += template_vars["reason"]
144 submessage = 'Info: '
145 submessage += template_vars['reason']
143 146
144 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
147 client = FlowdockIntegration.create_client(
148 self.integration.config['api_token'])
145 149 payload = {
146 150 "source": "AppEnlight",
147 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
151 "from_address": kwargs['request'].registry.settings[
152 'mailing.from_email'],
148 153 "subject": message,
149 154 "content": submessage,
150 "tags": ["appenlight", "alert", "uptime"],
151 "link": template_vars["destination_url"],
155 "tags": ["appenlight", "alert", 'uptime'],
156 "link": template_vars['destination_url']
152 157 }
153 158 client.send_to_inbox(payload)
154 159
155 160 def send_digest(self, **kwargs):
156 161 """
157 162 Build and send daily digest notification
158 163
159 164 kwargs:
160 165 application: application that the event applies for,
161 166 user: user that should be notified
162 167 request: request object
163 168 since_when: reports are newer than this time value,
164 169 reports: list of reports to render
165 170
166 171 """
167 172 template_vars = self.report_alert_notification_vars(kwargs)
168 173 message = "Daily report digest: %s - %s reports" % (
169 template_vars["resource_name"],
170 template_vars["confirmed_total"],
171 )
174 template_vars['resource_name'], template_vars['confirmed_total'])
172 175
173 f_args = (template_vars["confirmed_total"], template_vars["timestamp"])
176 f_args = (template_vars['confirmed_total'],
177 template_vars['timestamp'])
174 178
175 179 payload = {
176 180 "source": "AppEnlight",
177 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
181 "from_address": kwargs['request'].registry.settings[
182 'mailing.from_email'],
178 183 "subject": message,
179 "content": "%s reports in total since %s" % f_args,
184 "content": '%s reports in total since %s' % f_args,
180 185 "tags": ["appenlight", "digest"],
181 "link": template_vars["destination_url"],
186 "link": template_vars['destination_url']
182 187 }
183 188
184 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
189 client = FlowdockIntegration.create_client(
190 self.integration.config['api_token'])
185 191 client.send_to_inbox(payload)
186 192
187 log_msg = "DIGEST : %s via %s :: %s reports" % (
188 kwargs["user"].user_name,
193 log_msg = 'DIGEST : %s via %s :: %s reports' % (
194 kwargs['user'].user_name,
189 195 self.channel_visible_value,
190 template_vars["confirmed_total"],
191 )
196 template_vars['confirmed_total'])
192 197 log.warning(log_msg)
193 198
194 199 def notify_chart_alert(self, **kwargs):
195 200 """
196 201 Build and send chart alert notification
197 202
198 203 Kwargs:
199 204 application: application that the event applies for,
200 205 event: event that is notified,
201 206 user: user that should be notified
202 207 request: request object
203 208
204 209 """
205 210 template_vars = self.chart_alert_notification_vars(kwargs)
206 211
207 message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
208 template_vars["alert_action"],
209 template_vars["chart_name"],
210 template_vars["action_name"],
212 message = 'ALERT {}: value in "{}" chart ' \
213 'met alert "{}" criteria'.format(
214 template_vars['alert_action'],
215 template_vars['chart_name'],
216 template_vars['action_name'],
211 217 )
212 submessage = "Info: "
213 for item in template_vars["readable_values"]:
214 submessage += "{}: {}\n".format(item["label"], item["value"])
218 submessage = 'Info: '
219 for item in template_vars['readable_values']:
220 submessage += '{}: {}\n'.format(item['label'], item['value'])
215 221
216 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
222 client = FlowdockIntegration.create_client(
223 self.integration.config['api_token'])
217 224 payload = {
218 225 "source": "AppEnlight",
219 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
226 "from_address": kwargs['request'].registry.settings[
227 'mailing.from_email'],
220 228 "subject": message,
221 229 "content": submessage,
222 "tags": ["appenlight", "alert", "chart"],
223 "link": template_vars["destination_url"],
230 "tags": ["appenlight", "alert", 'chart'],
231 "link": template_vars['destination_url']
224 232 }
225 233 client.send_to_inbox(payload)
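
For reference, every Flowdock method above reduces to posting one dictionary to the integration's team inbox. A minimal sketch of that payload, assuming placeholder addresses, subject, and URL (none of these values come from the diff):

    payload = {
        'source': 'AppEnlight',
        'from_address': 'alerts@example.com',   # hypothetical sender
        'subject': 'my-app - 3 reports',        # hypothetical subject line
        'content': 'New report present',
        'tags': ['appenlight'],
        'link': 'https://example.com/ui/report?resource=1',
    }
    # client = FlowdockIntegration.create_client(api_token)
    # client.send_to_inbox(payload)
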
@@ -1,238 +1,229 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 from appenlight.models.alert_channel import AlertChannel
19 19 from appenlight.models.integrations.hipchat import HipchatIntegration
20 20 from webhelpers2.text import truncate
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 25 class HipchatAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "hipchat"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'hipchat'
28 }
27 29
28 30 def notify_reports(self, **kwargs):
29 31 """
30 32 Notify user of individual reports
31 33
32 34 kwargs:
33 35 application: application that the event applies for,
34 36 user: user that should be notified
35 37 request: request object
36 38 since_when: reports are newer than this time value,
37 39 reports: list of reports to render
38 40
39 41 """
40 42 template_vars = self.report_alert_notification_vars(kwargs)
41 43
42 app_url = kwargs["request"].registry.settings["_mail_url"]
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
44 f_args = (
45 "report",
46 template_vars["resource"].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
49 )
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
51
52 if template_vars["confirmed_total"] > 1:
44 app_url = kwargs['request'].registry.settings['_mail_url']
45 destination_url = kwargs['request'].route_url('/',
46 _app_url=app_url)
47 f_args = ('report',
48 template_vars['resource'].resource_id,
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
52 *f_args)
53
54 if template_vars['confirmed_total'] > 1:
53 55 template_vars["title"] = "%s - %s reports" % (
54 template_vars["resource_name"],
55 template_vars["confirmed_total"],
56 template_vars['resource_name'],
57 template_vars['confirmed_total'],
56 58 )
57 59 else:
58 error_title = truncate(
59 template_vars["reports"][0][1].error or "slow report", 90
60 )
60 error_title = truncate(template_vars['reports'][0][1].error or
61 'slow report', 90)
61 62 template_vars["title"] = "%s - '%s' report" % (
62 template_vars["resource_name"],
63 error_title,
64 )
63 template_vars['resource_name'],
64 error_title)
65 65
66 template_vars["title"] += " " + destination_url
66 template_vars["title"] += ' ' + destination_url
67 67
68 log_msg = "NOTIFY : %s via %s :: %s reports" % (
69 kwargs["user"].user_name,
68 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
69 kwargs['user'].user_name,
70 70 self.channel_visible_value,
71 template_vars["confirmed_total"],
72 )
71 template_vars['confirmed_total'])
73 72 log.warning(log_msg)
74 73
75 client = HipchatIntegration.create_client(self.integration.config["api_token"])
76 for room in self.integration.config["rooms"].split(","):
77 client.send(
78 {
79 "message_format": "text",
80 "message": template_vars["title"],
81 "from": "AppEnlight",
82 "room_id": room.strip(),
83 "color": "yellow",
84 }
85 )
74 client = HipchatIntegration.create_client(
75 self.integration.config['api_token'])
76 for room in self.integration.config['rooms'].split(','):
77 client.send({
78 "message_format": 'text',
79 "message": template_vars["title"],
80 "from": "AppEnlight",
81 "room_id": room.strip(),
82 "color": "yellow"
83 })
86 84
87 85 def notify_report_alert(self, **kwargs):
88 86 """
89 87 Build and send report alert notification
90 88
91 89 Kwargs:
92 90 application: application that the event applies for,
93 91 event: event that is notified,
94 92 user: user that should be notified
95 93 request: request object
96 94
97 95 """
98 96 template_vars = self.report_alert_notification_vars(kwargs)
99 97
100 if kwargs["event"].unified_alert_action() == "OPEN":
98 if kwargs['event'].unified_alert_action() == 'OPEN':
101 99
102 title = "ALERT %s: %s - %s %s" % (
103 template_vars["alert_action"],
104 template_vars["resource_name"],
105 kwargs["event"].values["reports"],
106 template_vars["report_type"],
100 title = 'ALERT %s: %s - %s %s' % (
101 template_vars['alert_action'],
102 template_vars['resource_name'],
103 kwargs['event'].values['reports'],
104 template_vars['report_type'],
107 105 )
108 106
109 107 else:
110 title = "ALERT %s: %s type: %s" % (
111 template_vars["alert_action"],
112 template_vars["resource_name"],
113 template_vars["alert_type"].replace("_", " "),
108 title = 'ALERT %s: %s type: %s' % (
109 template_vars['alert_action'],
110 template_vars['resource_name'],
111 template_vars['alert_type'].replace('_', ' '),
114 112 )
115 113
116 title += "\n " + template_vars["destination_url"]
114 title += '\n ' + template_vars['destination_url']
117 115
118 api_token = self.integration.config["api_token"]
116 api_token = self.integration.config['api_token']
119 117 client = HipchatIntegration.create_client(api_token)
120 for room in self.integration.config["rooms"].split(","):
121 client.send(
122 {
123 "message_format": "text",
124 "message": title,
125 "from": "AppEnlight",
126 "room_id": room.strip(),
127 "color": "red",
128 "notify": "1",
129 }
130 )
118 for room in self.integration.config['rooms'].split(','):
119 client.send({
120 "message_format": 'text',
121 "message": title,
122 "from": "AppEnlight",
123 "room_id": room.strip(),
124 "color": "red",
125 "notify": '1'
126 })
131 127
132 128 def notify_uptime_alert(self, **kwargs):
133 129 """
134 130 Build and send uptime alert notification
135 131
136 132 Kwargs:
137 133 application: application that the event applies for,
138 134 event: event that is notified,
139 135 user: user that should be notified
140 136 request: request object
141 137
142 138 """
143 139 template_vars = self.uptime_alert_notification_vars(kwargs)
144 140
145 message = "ALERT %s: %s has uptime issues\n" % (
146 template_vars["alert_action"],
147 template_vars["resource_name"],
141 message = 'ALERT %s: %s has uptime issues\n' % (
142 template_vars['alert_action'],
143 template_vars['resource_name'],
148 144 )
149 message += template_vars["reason"]
150 message += "\n{}".format(template_vars["destination_url"])
145 message += template_vars['reason']
146 message += '\n{}'.format(template_vars['destination_url'])
151 147
152 api_token = self.integration.config["api_token"]
148 api_token = self.integration.config['api_token']
153 149 client = HipchatIntegration.create_client(api_token)
154 for room in self.integration.config["rooms"].split(","):
155 client.send(
156 {
157 "message_format": "text",
158 "message": message,
159 "from": "AppEnlight",
160 "room_id": room.strip(),
161 "color": "red",
162 "notify": "1",
163 }
164 )
150 for room in self.integration.config['rooms'].split(','):
151 client.send({
152 "message_format": 'text',
153 "message": message,
154 "from": "AppEnlight",
155 "room_id": room.strip(),
156 "color": "red",
157 "notify": '1'
158 })
165 159
166 160 def notify_chart_alert(self, **kwargs):
167 161 """
168 162 Build and send chart alert notification
169 163
170 164 Kwargs:
171 165 application: application that the event applies for,
172 166 event: event that is notified,
173 167 user: user that should be notified
174 168 request: request object
175 169
176 170 """
177 171 template_vars = self.chart_alert_notification_vars(kwargs)
178 message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format(
179 template_vars["alert_action"],
180 template_vars["chart_name"],
181 template_vars["action_name"],
172 message = 'ALERT {}: value in "{}" chart: ' \
173 'met alert "{}" criteria\n'.format(
174 template_vars['alert_action'],
175 template_vars['chart_name'],
176 template_vars['action_name'],
182 177 )
183 178
184 for item in template_vars["readable_values"]:
185 message += "{}: {}\n".format(item["label"], item["value"])
179 for item in template_vars['readable_values']:
180 message += '{}: {}\n'.format(item['label'], item['value'])
186 181
187 message += template_vars["destination_url"]
182 message += template_vars['destination_url']
188 183
189 api_token = self.integration.config["api_token"]
184 api_token = self.integration.config['api_token']
190 185 client = HipchatIntegration.create_client(api_token)
191 for room in self.integration.config["rooms"].split(","):
192 client.send(
193 {
194 "message_format": "text",
195 "message": message,
196 "from": "AppEnlight",
197 "room_id": room.strip(),
198 "color": "red",
199 "notify": "1",
200 }
201 )
186 for room in self.integration.config['rooms'].split(','):
187 client.send({
188 "message_format": 'text',
189 "message": message,
190 "from": "AppEnlight",
191 "room_id": room.strip(),
192 "color": "red",
193 "notify": '1'
194 })
202 195
203 196 def send_digest(self, **kwargs):
204 197 """
205 198 Build and send daily digest notification
206 199
207 200 kwargs:
208 201 application: application that the event applies for,
209 202 user: user that should be notified
210 203 request: request object
211 204 since_when: reports are newer than this time value,
212 205 reports: list of reports to render
213 206
214 207 """
215 208 template_vars = self.report_alert_notification_vars(kwargs)
216 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
209 f_args = (template_vars['resource_name'],
210 template_vars['confirmed_total'],)
217 211 message = "Daily report digest: %s - %s reports" % f_args
218 message += "\n{}".format(template_vars["destination_url"])
219 api_token = self.integration.config["api_token"]
212 message += '\n{}'.format(template_vars['destination_url'])
213 api_token = self.integration.config['api_token']
220 214 client = HipchatIntegration.create_client(api_token)
221 for room in self.integration.config["rooms"].split(","):
222 client.send(
223 {
224 "message_format": "text",
225 "message": message,
226 "from": "AppEnlight",
227 "room_id": room.strip(),
228 "color": "green",
229 "notify": "1",
230 }
231 )
232
233 log_msg = "DIGEST : %s via %s :: %s reports" % (
234 kwargs["user"].user_name,
215 for room in self.integration.config['rooms'].split(','):
216 client.send({
217 "message_format": 'text',
218 "message": message,
219 "from": "AppEnlight",
220 "room_id": room.strip(),
221 "color": "green",
222 "notify": '1'
223 })
224
225 log_msg = 'DIGEST : %s via %s :: %s reports' % (
226 kwargs['user'].user_name,
235 227 self.channel_visible_value,
236 template_vars["confirmed_total"],
237 )
228 template_vars['confirmed_total'])
238 229 log.warning(log_msg)
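
The HipChat channel differs from the others mainly in its fan-out: the rooms setting is a comma-separated string, and each notification is sent once per room. A short sketch of that loop with a hypothetical config:

    config = {'api_token': 'xxx', 'rooms': 'ops, alerts'}  # hypothetical values
    for room in config['rooms'].split(','):
        message_payload = {
            'message_format': 'text',
            'message': 'ALERT OPEN: my-app has uptime issues',
            'from': 'AppEnlight',
            'room_id': room.strip(),  # strip() tolerates spaces after commas
            'color': 'red',
            'notify': '1',
        }
        # client.send(message_payload)
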
@@ -1,270 +1,285 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 from appenlight.models.alert_channel import AlertChannel
19 19 from appenlight.models.integrations.slack import SlackIntegration
20 20 from webhelpers2.text import truncate
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24
25 25 class SlackAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "slack"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'slack'
28 }
27 29
28 30 def notify_reports(self, **kwargs):
29 31 """
30 32 Notify user of individual reports
31 33
32 34 kwargs:
33 35 application: application that the event applies for,
34 36 user: user that should be notified
35 37 request: request object
36 38 since_when: reports are newer than this time value,
37 39 reports: list of reports to render
38 40
39 41 """
40 42 template_vars = self.report_alert_notification_vars(kwargs)
41 template_vars["title"] = template_vars["resource_name"]
43 template_vars["title"] = template_vars['resource_name']
42 44
43 if template_vars["confirmed_total"] > 1:
44 template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
45 if template_vars['confirmed_total'] > 1:
46 template_vars['subtext'] = '%s reports' % template_vars[
47 'confirmed_total']
45 48 else:
46 error_title = truncate(
47 template_vars["reports"][0][1].error or "slow report", 90
48 )
49 template_vars["subtext"] = error_title
49 error_title = truncate(template_vars['reports'][0][1].error or
50 'slow report', 90)
51 template_vars['subtext'] = error_title
50 52
51 log_msg = "NOTIFY : %s via %s :: %s reports" % (
52 kwargs["user"].user_name,
53 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
54 kwargs['user'].user_name,
53 55 self.channel_visible_value,
54 template_vars["confirmed_total"],
55 )
56 template_vars['confirmed_total'])
56 57 log.warning(log_msg)
57 58
58 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
59 client = SlackIntegration.create_client(
60 self.integration.config['webhook_url'])
59 61 report_data = {
60 62 "username": "AppEnlight",
61 63 "icon_emoji": ":fire:",
62 64 "attachments": [
63 65 {
64 66 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
65 "fallback": "*%s* - <%s| Browse>"
66 % (template_vars["title"], template_vars["destination_url"]),
67 "pretext": "*%s* - <%s| Browse>"
68 % (template_vars["title"], template_vars["destination_url"]),
67 "fallback": "*%s* - <%s| Browse>" % (
68 template_vars["title"],
69 template_vars['destination_url']),
70 "pretext": "*%s* - <%s| Browse>" % (
71 template_vars["title"],
72 template_vars['destination_url']),
69 73 "color": "warning",
70 74 "fields": [
71 {"value": "Info: %s" % template_vars["subtext"], "short": False}
72 ],
75 {
76 "value": 'Info: %s' % template_vars['subtext'],
77 "short": False
78 }
79 ]
73 80 }
74 ],
81 ]
75 82 }
76 83 client.make_request(data=report_data)
77 84
78 85 def notify_report_alert(self, **kwargs):
79 86 """
80 87 Build and send report alert notification
81 88
82 89 Kwargs:
83 90 application: application that the event applies for,
84 91 event: event that is notified,
85 92 user: user that should be notified
86 93 request: request object
87 94
88 95 """
89 96 template_vars = self.report_alert_notification_vars(kwargs)
90 97
91 if kwargs["event"].unified_alert_action() == "OPEN":
92 title = "*ALERT %s*: %s" % (
93 template_vars["alert_action"],
94 template_vars["resource_name"],
98 if kwargs['event'].unified_alert_action() == 'OPEN':
99 title = '*ALERT %s*: %s' % (
100 template_vars['alert_action'],
101 template_vars['resource_name']
95 102 )
96 103
97 template_vars["subtext"] = "Got at least %s %s" % (
98 kwargs["event"].values["reports"],
99 template_vars["report_type"],
104 template_vars['subtext'] = 'Got at least %s %s' % (
105 kwargs['event'].values['reports'],
106 template_vars['report_type']
100 107 )
101 108
102 109 else:
103 title = "*ALERT %s*: %s" % (
104 template_vars["alert_action"],
105 template_vars["resource_name"],
110 title = '*ALERT %s*: %s' % (
111 template_vars['alert_action'],
112 template_vars['resource_name'],
106 113 )
107 114
108 template_vars["subtext"] = ""
115 template_vars['subtext'] = ''
109 116
110 alert_type = template_vars["alert_type"].replace("_", " ")
111 alert_type = alert_type.replace("alert", "").capitalize()
117 alert_type = template_vars['alert_type'].replace('_', ' ')
118 alert_type = alert_type.replace('alert', '').capitalize()
112 119
113 template_vars["type"] = "Type: %s" % alert_type
120 template_vars['type'] = "Type: %s" % alert_type
114 121
115 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
122 client = SlackIntegration.create_client(
123 self.integration.config['webhook_url']
124 )
116 125 report_data = {
117 126 "username": "AppEnlight",
118 127 "icon_emoji": ":rage:",
119 128 "attachments": [
120 129 {
121 130 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
122 "fallback": "%s - <%s| Browse>"
123 % (title, template_vars["destination_url"]),
124 "pretext": "%s - <%s| Browse>"
125 % (title, template_vars["destination_url"]),
131 "fallback": "%s - <%s| Browse>" % (
132 title, template_vars['destination_url']),
133 "pretext": "%s - <%s| Browse>" % (
134 title, template_vars['destination_url']),
126 135 "color": "danger",
127 136 "fields": [
128 137 {
129 "title": template_vars["type"],
130 "value": template_vars["subtext"],
131 "short": False,
138 "title": template_vars['type'],
139 "value": template_vars['subtext'],
140 "short": False
132 141 }
133 ],
142 ]
134 143 }
135 ],
144 ]
136 145 }
137 146 client.make_request(data=report_data)
138 147
139 148 def notify_uptime_alert(self, **kwargs):
140 149 """
141 150 Build and send uptime alert notification
142 151
143 152 Kwargs:
144 153 application: application that the event applies for,
145 154 event: event that is notified,
146 155 user: user that should be notified
147 156 request: request object
148 157
149 158 """
150 159 template_vars = self.uptime_alert_notification_vars(kwargs)
151 160
152 title = "*ALERT %s*: %s" % (
153 template_vars["alert_action"],
154 template_vars["resource_name"],
161 title = '*ALERT %s*: %s' % (
162 template_vars['alert_action'],
163 template_vars['resource_name'],
164 )
165 client = SlackIntegration.create_client(
166 self.integration.config['webhook_url']
155 167 )
156 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
157 168 report_data = {
158 169 "username": "AppEnlight",
159 170 "icon_emoji": ":rage:",
160 171 "attachments": [
161 172 {
162 173 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
163 174 "fallback": "{} - <{}| Browse>".format(
164 title, template_vars["destination_url"]
165 ),
175 title, template_vars['destination_url']),
166 176 "pretext": "{} - <{}| Browse>".format(
167 title, template_vars["destination_url"]
168 ),
177 title, template_vars['destination_url']),
169 178 "color": "danger",
170 179 "fields": [
171 180 {
172 181 "title": "Application has uptime issues",
173 "value": template_vars["reason"],
174 "short": False,
182 "value": template_vars['reason'],
183 "short": False
175 184 }
176 ],
185 ]
177 186 }
178 ],
187 ]
179 188 }
180 189 client.make_request(data=report_data)
181 190
182 191 def notify_chart_alert(self, **kwargs):
183 192 """
184 193 Build and send chart alert notification
185 194
186 195 Kwargs:
187 196 application: application that the event applies for,
188 197 event: event that is notified,
189 198 user: user that should be notified
190 199 request: request object
191 200
192 201 """
193 202 template_vars = self.chart_alert_notification_vars(kwargs)
194 203
195 title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
196 template_vars["alert_action"],
197 template_vars["chart_name"],
198 template_vars["action_name"],
204 title = '*ALERT {}*: value in *"{}"* chart ' \
205 'met alert *"{}"* criteria'.format(
206 template_vars['alert_action'],
207 template_vars['chart_name'],
208 template_vars['action_name'],
199 209 )
200 210
201 subtext = ""
202 for item in template_vars["readable_values"]:
203 subtext += "{} - {}\n".format(item["label"], item["value"])
211 subtext = ''
212 for item in template_vars['readable_values']:
213 subtext += '{} - {}\n'.format(item['label'], item['value'])
204 214
205 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
215 client = SlackIntegration.create_client(
216 self.integration.config['webhook_url']
217 )
206 218 report_data = {
207 219 "username": "AppEnlight",
208 220 "icon_emoji": ":rage:",
209 221 "attachments": [
210 {
211 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
212 "fallback": "{} - <{}| Browse>".format(
213 title, template_vars["destination_url"]
214 ),
215 "pretext": "{} - <{}| Browse>".format(
216 title, template_vars["destination_url"]
217 ),
218 "color": "danger",
219 "fields": [
220 {
221 "title": "Following criteria were met:",
222 "value": subtext,
223 "short": False,
224 }
225 ],
226 }
227 ],
222 {"mrkdwn_in": ["text", "pretext", "title", "fallback"],
223 "fallback": "{} - <{}| Browse>".format(
224 title, template_vars['destination_url']),
225 "pretext": "{} - <{}| Browse>".format(
226 title, template_vars['destination_url']),
227 "color": "danger",
228 "fields": [
229 {
230 "title": "Following criteria were met:",
231 "value": subtext,
232 "short": False
233 }
234 ]
235 }
236 ]
228 237 }
229 238 client.make_request(data=report_data)
230 239
231 240 def send_digest(self, **kwargs):
232 241 """
233 242 Build and send daily digest notification
234 243
235 244 kwargs:
236 245 application: application that the event applies for,
237 246 user: user that should be notified
238 247 request: request object
239 248 since_when: reports are newer than this time value,
240 249 reports: list of reports to render
241 250
242 251 """
243 252 template_vars = self.report_alert_notification_vars(kwargs)
244 title = "*Daily report digest*: %s" % template_vars["resource_name"]
253 title = "*Daily report digest*: %s" % template_vars['resource_name']
245 254
246 subtext = "%s reports" % template_vars["confirmed_total"]
255 subtext = '%s reports' % template_vars['confirmed_total']
247 256
248 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
257 client = SlackIntegration.create_client(
258 self.integration.config['webhook_url']
259 )
249 260 report_data = {
250 261 "username": "AppEnlight",
251 262 "attachments": [
252 263 {
253 264 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
254 "fallback": "%s : <%s| Browse>"
255 % (title, template_vars["destination_url"]),
256 "pretext": "%s: <%s| Browse>"
257 % (title, template_vars["destination_url"]),
265 "fallback": "%s : <%s| Browse>" % (
266 title, template_vars['destination_url']),
267 "pretext": "%s: <%s| Browse>" % (
268 title, template_vars['destination_url']),
258 269 "color": "good",
259 "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
270 "fields": [
271 {
272 "title": "Got at least: %s" % subtext,
273 "short": False
274 }
275 ]
260 276 }
261 ],
277 ]
262 278 }
263 279 client.make_request(data=report_data)
264 280
265 log_msg = "DIGEST : %s via %s :: %s reports" % (
266 kwargs["user"].user_name,
281 log_msg = 'DIGEST : %s via %s :: %s reports' % (
282 kwargs['user'].user_name,
267 283 self.channel_visible_value,
268 template_vars["confirmed_total"],
269 )
284 template_vars['confirmed_total'])
270 285 log.warning(log_msg)
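
The Slack channel builds standard incoming-webhook attachments. As a self-contained sketch, a payload of the shape above can be posted with nothing but the standard library; the webhook URL and field values here are placeholders, not values from this diff:

    import json
    import urllib.request

    webhook_url = 'https://hooks.slack.com/services/T000/B000/XXXX'  # placeholder
    report_data = {
        'username': 'AppEnlight',
        'icon_emoji': ':fire:',
        'attachments': [{
            'mrkdwn_in': ['text', 'pretext', 'title', 'fallback'],
            'fallback': '*my-app* - <https://example.com| Browse>',
            'pretext': '*my-app* - <https://example.com| Browse>',
            'color': 'warning',
            'fields': [{'value': 'Info: 3 reports', 'short': False}],
        }],
    }
    req = urllib.request.Request(
        webhook_url,
        data=json.dumps(report_data).encode('utf-8'),
        headers={'Content-Type': 'application/json'},
    )
    # urllib.request.urlopen(req)  # commented out: real network call
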
@@ -1,113 +1,104 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import uuid
18 18 import logging
19 19 import sqlalchemy as sa
20 20 from appenlight.models.resource import Resource
21 21 from sqlalchemy.orm import aliased
22 22
23 23 log = logging.getLogger(__name__)
24 24
25 25
26 26 def generate_api_key():
27 uid = str(uuid.uuid4()).replace("-", "")
27 uid = str(uuid.uuid4()).replace('-', '')
28 28 return uid[0:32]
29 29
30 30
31 31 class Application(Resource):
32 32 """
33 33 Resource of application type
34 34 """
35 35
36 __tablename__ = "applications"
37 __mapper_args__ = {"polymorphic_identity": "application"}
36 __tablename__ = 'applications'
37 __mapper_args__ = {'polymorphic_identity': 'application'}
38 38
39 39 # lists configurable possible permissions for this resource type
40 __possible_permissions__ = ("view", "update_reports")
41
42 resource_id = sa.Column(
43 sa.Integer(),
44 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
45 primary_key=True,
46 )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
48 api_key = sa.Column(
49 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
50 )
51 public_key = sa.Column(
52 sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
53 )
54 default_grouping = sa.Column(
55 sa.Unicode(20), nullable=False, default="url_traceback"
56 )
40 __possible_permissions__ = ('view', 'update_reports')
41
42 resource_id = sa.Column(sa.Integer(),
43 sa.ForeignKey('resources.resource_id',
44 onupdate='CASCADE',
45 ondelete='CASCADE', ),
46 primary_key=True, )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default='')
48 api_key = sa.Column(sa.String(32), nullable=False, unique=True, index=True,
49 default=generate_api_key)
50 public_key = sa.Column(sa.String(32), nullable=False, unique=True,
51 index=True,
52 default=generate_api_key)
53 default_grouping = sa.Column(sa.Unicode(20), nullable=False,
54 default='url_traceback')
57 55 error_report_threshold = sa.Column(sa.Integer(), default=10)
58 56 slow_report_threshold = sa.Column(sa.Integer(), default=10)
59 allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)
57 allow_permanent_storage = sa.Column(sa.Boolean(), default=False,
58 nullable=False)
60 59
61 @sa.orm.validates("default_grouping")
60 @sa.orm.validates('default_grouping')
62 61 def validate_default_grouping(self, key, grouping):
63 62 """ validate if resouce can have specific permission """
64 assert grouping in ["url_type", "url_traceback", "traceback_server"]
63 assert grouping in ['url_type', 'url_traceback', 'traceback_server']
65 64 return grouping
66 65
67 report_groups = sa.orm.relationship(
68 "ReportGroup",
69 cascade="all, delete-orphan",
70 passive_deletes=True,
71 passive_updates=True,
72 lazy="dynamic",
73 backref=sa.orm.backref("application", lazy="joined"),
74 )
75
76 postprocess_conf = sa.orm.relationship(
77 "ApplicationPostprocessConf",
78 cascade="all, delete-orphan",
79 passive_deletes=True,
80 passive_updates=True,
81 backref="resource",
82 )
83
84 logs = sa.orm.relationship(
85 "Log",
86 lazy="dynamic",
87 backref="application",
88 passive_deletes=True,
89 passive_updates=True,
90 )
91
92 integrations = sa.orm.relationship(
93 "IntegrationBase",
94 backref="resource",
95 cascade="all, delete-orphan",
96 passive_deletes=True,
97 passive_updates=True,
98 )
66 report_groups = sa.orm.relationship('ReportGroup',
67 cascade="all, delete-orphan",
68 passive_deletes=True,
69 passive_updates=True,
70 lazy='dynamic',
71 backref=sa.orm.backref('application',
72 lazy="joined"))
73
74 postprocess_conf = sa.orm.relationship('ApplicationPostprocessConf',
75 cascade="all, delete-orphan",
76 passive_deletes=True,
77 passive_updates=True,
78 backref='resource')
79
80 logs = sa.orm.relationship('Log',
81 lazy='dynamic',
82 backref='application',
83 passive_deletes=True,
84 passive_updates=True, )
85
86 integrations = sa.orm.relationship('IntegrationBase',
87 backref='resource',
88 cascade="all, delete-orphan",
89 passive_deletes=True,
90 passive_updates=True, )
99 91
100 92 def generate_api_key(self):
101 93 return generate_api_key()
102 94
103 95
104 96 def after_update(mapper, connection, target):
105 97 from appenlight.models.services.application import ApplicationService
106
107 log.info("clearing out ApplicationService cache")
98 log.info('clearing out ApplicationService cache')
108 99 ApplicationService.by_id_cached().invalidate(target.resource_id)
109 100 ApplicationService.by_api_key_cached().invalidate(target.api_key)
110 101
111 102
112 sa.event.listen(Application, "after_update", after_update)
113 sa.event.listen(Application, "after_delete", after_update)
103 sa.event.listen(Application, 'after_update', after_update)
104 sa.event.listen(Application, 'after_delete', after_update)
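
One detail worth noting in generate_api_key above: a uuid4 rendered as a string is 36 characters, and removing the four dashes leaves exactly 32, so the [0:32] slice keeps the whole value. A runnable check:

    import uuid

    def generate_api_key():
        # a 36-char uuid4 string minus 4 dashes is 32 chars, so the
        # slice below never actually truncates anything
        return str(uuid.uuid4()).replace('-', '')[0:32]

    print(len(generate_api_key()))  # 32
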
@@ -1,47 +1,45 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 from ziggurat_foundations.models.base import BaseModel
18 18 import sqlalchemy as sa
19 19
20 20 from appenlight.models import Base
21 21 from appenlight.models.report_group import ReportGroup
22 22
23 23
24 24 class ApplicationPostprocessConf(Base, BaseModel):
25 25 """
26 26 Stores prioritizing conditions for reports
27 27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
28 28 """
29 29
30 __tablename__ = "application_postprocess_conf"
30 __tablename__ = 'application_postprocess_conf'
31 31
32 32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
33 resource_id = sa.Column(
34 sa.Integer(),
35 sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
36 )
33 resource_id = sa.Column(sa.Integer(),
34 sa.ForeignKey('resources.resource_id',
35 onupdate='CASCADE',
36 ondelete='CASCADE'))
37 37 do = sa.Column(sa.Unicode(25), nullable=False)
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
39 rule = sa.Column(
40 sa.dialects.postgresql.JSON,
41 nullable=False,
42 default={"field": "http_status", "op": "ge", "value": "500"},
43 )
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default='')
39 rule = sa.Column(sa.dialects.postgresql.JSON,
40 nullable=False, default={'field': 'http_status',
41 "op": "ge", "value": "500"})
44 42
45 43 def postprocess(self, item):
46 44 new_value = int(self.new_value)
47 45 item.priority = ReportGroup.priority + new_value
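
The rule column above defaults to {'field': 'http_status', 'op': 'ge', 'value': '500'}, and postprocess bumps a report group's priority when the rule matches. A toy evaluation of such a rule, assuming a plain dict in place of a real report (AppEnlight's actual rule engine lives elsewhere; this only illustrates the shape of the stored rule):

    import operator

    rule = {'field': 'http_status', 'op': 'ge', 'value': '500'}
    report = {'http_status': 502, 'priority': 5}  # hypothetical report

    ops = {'ge': operator.ge, 'le': operator.le, 'eq': operator.eq}
    if ops[rule['op']](report[rule['field']], int(rule['value'])):
        report['priority'] += 1  # "if the rule matches, bump priority +1"
    print(report['priority'])  # 6
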
@@ -1,57 +1,52 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import logging
18 18 import sqlalchemy as sa
19 19
20 20 from datetime import datetime
21 21 from appenlight.models import Base
22 22 from ziggurat_foundations.models.base import BaseModel
23 23 from ziggurat_foundations.models.services.user import UserService
24 24
25 25 log = logging.getLogger(__name__)
26 26
27 27
28 28 class AuthToken(Base, BaseModel):
29 29 """
30 30 Stores user authentication tokens
31 31 """
32
33 __tablename__ = "auth_tokens"
32 __tablename__ = 'auth_tokens'
34 33
35 34 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
36 token = sa.Column(
37 sa.Unicode(40),
38 nullable=False,
39 default=lambda x: UserService.generate_random_string(40),
40 )
41 owner_id = sa.Column(
42 sa.Unicode(30),
43 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
44 )
35 token = sa.Column(sa.Unicode(40), nullable=False,
36 default=lambda x: UserService.generate_random_string(40))
37 owner_id = sa.Column(sa.Unicode(30),
38 sa.ForeignKey('users.id', onupdate='CASCADE',
39 ondelete='CASCADE'))
45 40 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
46 41 expires = sa.Column(sa.DateTime)
47 description = sa.Column(sa.Unicode, default="")
42 description = sa.Column(sa.Unicode, default='')
48 43
49 44 @property
50 45 def is_expired(self):
51 46 if self.expires:
52 47 return self.expires < datetime.utcnow()
53 48 else:
54 49 return False
55 50
56 51 def __str__(self):
57 return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
52 return '<AuthToken u:%s t:%s...>' % (self.owner_id, self.token[0:10])
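
The is_expired property above treats a missing expires value as a token that never expires. The same logic, lifted out of SQLAlchemy into a plain class so it can be run directly:

    from datetime import datetime, timedelta

    class Token:
        def __init__(self, expires=None):
            self.expires = expires

        @property
        def is_expired(self):
            # no expiry date means the token never expires
            if self.expires:
                return self.expires < datetime.utcnow()
            return False

    print(Token().is_expired)                                       # False
    print(Token(datetime.utcnow() - timedelta(days=1)).is_expired)  # True
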
@@ -1,32 +1,32 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import sqlalchemy as sa
18 18 from ziggurat_foundations.models.base import BaseModel
19 from sqlalchemy.dialects.postgresql import JSON
19 from sqlalchemy.dialects.postgres import JSON
20 20
21 21 from . import Base
22 22
23 23
24 24 class Config(Base, BaseModel):
25 __tablename__ = "config"
25 __tablename__ = 'config'
26 26
27 27 key = sa.Column(sa.Unicode, primary_key=True)
28 28 section = sa.Column(sa.Unicode, primary_key=True)
29 29 value = sa.Column(JSON, nullable=False)
30 30
31 31 def __json__(self, request):
32 32 return self.get_dict()
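
A side note on the import above: sqlalchemy.dialects.postgres is a deprecated alias of sqlalchemy.dialects.postgresql, which is the long-term import path for the JSON type. A standalone table with the same shape as Config, using the postgresql path (the table definition here is illustrative, not lifted from the project):

    import sqlalchemy as sa
    from sqlalchemy.dialects.postgresql import JSON  # long-term import path

    metadata = sa.MetaData()
    config = sa.Table(
        'config', metadata,
        sa.Column('key', sa.Unicode, primary_key=True),
        sa.Column('section', sa.Unicode, primary_key=True),
        sa.Column('value', JSON, nullable=False),
    )
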
@@ -1,170 +1,160 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import sqlalchemy as sa
18 18 import logging
19 19
20 20 from datetime import datetime
21 21 from appenlight.models import Base, get_db_session
22 22 from appenlight.models.services.report_stat import ReportStatService
23 from appenlight.models.resource import Resource
23 24 from appenlight.models.integrations import IntegrationException
24 25 from pyramid.threadlocal import get_current_request
25 26 from sqlalchemy.dialects.postgresql import JSON
26 27 from ziggurat_foundations.models.base import BaseModel
27 from ziggurat_foundations.models.services.resource import ResourceService
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
32 32 class Event(Base, BaseModel):
33 __tablename__ = "events"
33 __tablename__ = 'events'
34 34
35 types = {
36 "error_report_alert": 1,
37 "slow_report_alert": 3,
38 "comment": 5,
39 "assignment": 6,
40 "uptime_alert": 7,
41 "chart_alert": 9,
42 }
35 types = {'error_report_alert': 1,
36 'slow_report_alert': 3,
37 'comment': 5,
38 'assignment': 6,
39 'uptime_alert': 7,
40 'chart_alert': 9}
43 41
44 statuses = {"active": 1, "closed": 0}
42 statuses = {'active': 1,
43 'closed': 0}
45 44
46 45 id = sa.Column(sa.Integer, primary_key=True)
47 46 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
48 47 end_date = sa.Column(sa.DateTime)
49 48 status = sa.Column(sa.Integer, default=1)
50 49 event_type = sa.Column(sa.Integer, default=1)
51 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
53 resource_id = sa.Column(
54 sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True
55 )
50 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
51 nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
53 nullable=True)
54 resource_id = sa.Column(sa.Integer(),
55 sa.ForeignKey('resources.resource_id'),
56 nullable=True)
56 57 target_id = sa.Column(sa.Integer)
57 58 target_uuid = sa.Column(sa.Unicode(40))
58 59 text = sa.Column(sa.UnicodeText())
59 60 values = sa.Column(JSON(), nullable=False, default=None)
60 61
61 62 def __repr__(self):
62 return "<Event %s, app:%s, %s>" % (
63 self.unified_alert_name(),
64 self.resource_id,
65 self.unified_alert_action(),
66 )
63 return '<Event %s, app:%s, %s>' % (self.unified_alert_name(),
64 self.resource_id,
65 self.unified_alert_action())
67 66
68 67 @property
69 68 def reverse_types(self):
70 69 return dict([(v, k) for k, v in self.types.items()])
71 70
72 71 def unified_alert_name(self):
73 72 return self.reverse_types[self.event_type]
74 73
75 74 def unified_alert_action(self):
76 75 event_name = self.reverse_types[self.event_type]
77 if self.status == Event.statuses["closed"]:
76 if self.status == Event.statuses['closed']:
78 77 return "CLOSE"
79 if self.status != Event.statuses["closed"]:
78 if self.status != Event.statuses['closed']:
80 79 return "OPEN"
81 80 return event_name
82 81
83 82 def send_alerts(self, request=None, resource=None, db_session=None):
84 83 """" Sends alerts to applicable channels """
85 84 db_session = get_db_session(db_session)
86 85 db_session.flush()
87 86 if not resource:
88 resource = ResourceService.by_resource_id(self.resource_id)
87 resource = Resource.by_resource_id(self.resource_id)
89 88 if not request:
90 89 request = get_current_request()
91 90 if not resource:
92 91 return
93 users = set([p.user for p in ResourceService.users_for_perm(resource, "view")])
92 users = set([p.user for p in resource.users_for_perm('view')])
94 93 for user in users:
95 94 for channel in user.alert_channels:
96 matches_resource = not channel.resources or resource in [
97 r.resource_id for r in channel.resources
98 ]
95 matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources]
99 96 if (
100 not channel.channel_validated
101 or not channel.send_alerts
102 or not matches_resource
97 not channel.channel_validated or
98 not channel.send_alerts or
99 not matches_resource
103 100 ):
104 101 continue
105 102 else:
106 103 try:
107 channel.notify_alert(
108 resource=resource, event=self, user=user, request=request
109 )
104 channel.notify_alert(resource=resource,
105 event=self,
106 user=user,
107 request=request)
110 108 except IntegrationException as e:
111 log.warning("%s" % e)
109 log.warning('%s' % e)
112 110
113 111 def validate_or_close(self, since_when, db_session=None):
114 112 """ Checks if alerts should stay open or it's time to close them.
115 113 Generates close alert event if alerts get closed """
116 event_types = [
117 Event.types["error_report_alert"],
118 Event.types["slow_report_alert"],
119 ]
120 app = ResourceService.by_resource_id(self.resource_id)
121 # if app was deleted close instantly
122 if not app:
123 self.close()
124 return
125
114 event_types = [Event.types['error_report_alert'],
115 Event.types['slow_report_alert']]
116 app = Resource.by_resource_id(self.resource_id)
126 117 if self.event_type in event_types:
127 118 total = ReportStatService.count_by_type(
128 self.event_type, self.resource_id, since_when
129 )
130 if Event.types["error_report_alert"] == self.event_type:
119 self.event_type, self.resource_id, since_when)
120 if Event.types['error_report_alert'] == self.event_type:
131 121 threshold = app.error_report_threshold
132 if Event.types["slow_report_alert"] == self.event_type:
122 if Event.types['slow_report_alert'] == self.event_type:
133 123 threshold = app.slow_report_threshold
134 124
135 125 if total < threshold:
136 126 self.close()
137 127
138 128 def close(self, db_session=None):
139 129 """
140 130 Closes an event and sends notification to affected users
141 131 """
142 132 self.end_date = datetime.utcnow()
143 self.status = Event.statuses["closed"]
144 log.warning("ALERT: CLOSE: %s" % self)
133 self.status = Event.statuses['closed']
134 log.warning('ALERT: CLOSE: %s' % self)
145 135 self.send_alerts()
146 136
147 137 def text_representation(self):
148 138 alert_type = self.unified_alert_name()
149 text = ""
150 if "slow_report" in alert_type:
151 text += "Slow report alert"
152 if "error_report" in alert_type:
153 text += "Exception report alert"
154 if "uptime_alert" in alert_type:
155 text += "Uptime alert"
156 if "chart_alert" in alert_type:
157 text += "Metrics value alert"
139 text = ''
140 if 'slow_report' in alert_type:
141 text += 'Slow report alert'
142 if 'error_report' in alert_type:
143 text += 'Exception report alert'
144 if 'uptime_alert' in alert_type:
145 text += 'Uptime alert'
146 if 'chart_alert' in alert_type:
147 text += 'Metrics value alert'
158 148
159 149 alert_action = self.unified_alert_action()
160 if alert_action == "OPEN":
161 text += " got opened."
162 if alert_action == "CLOSE":
163 text += " got closed."
150 if alert_action == 'OPEN':
151 text += ' got opened.'
152 if alert_action == 'CLOSE':
153 text += ' got closed.'
164 154 return text
165 155
166 156 def get_dict(self, request=None):
167 157 dict_data = super(Event, self).get_dict()
168 dict_data["text"] = self.text_representation()
169 dict_data["resource_name"] = self.resource.resource_name
158 dict_data['text'] = self.text_representation()
159 dict_data['resource_name'] = self.resource.resource_name
170 160 return dict_data
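
Two small mechanics in Event above are easy to miss: reverse_types inverts the types map so an integer event_type can be named, and validate_or_close closes an alert once the report count since since_when drops below the application's threshold. Both in miniature, with hypothetical counts:

    types = {'error_report_alert': 1, 'slow_report_alert': 3, 'comment': 5,
             'assignment': 6, 'uptime_alert': 7, 'chart_alert': 9}
    reverse_types = {v: k for k, v in types.items()}
    print(reverse_types[7])  # 'uptime_alert'

    # close the alert once activity falls under the app's threshold
    total, threshold = 3, 10
    print('close' if total < threshold else 'stay open')  # close
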
@@ -1,36 +1,36 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
4 4 #
5 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 6 # you may not use this file except in compliance with the License.
7 7 # You may obtain a copy of the License at
8 8 #
9 9 # http://www.apache.org/licenses/LICENSE-2.0
10 10 #
11 11 # Unless required by applicable law or agreed to in writing, software
12 12 # distributed under the License is distributed on an "AS IS" BASIS,
13 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 14 # See the License for the specific language governing permissions and
15 15 # limitations under the License.
16 16
17 17 import sqlalchemy as sa
18 18 from sqlalchemy.ext.declarative import declared_attr
19 19 from ziggurat_foundations.models.external_identity import ExternalIdentityMixin
20 20
21 21 from appenlight.models import Base
22 22 from appenlight.lib.sqlalchemy_fields import EncryptedUnicode
23 23
24 24
25 25 class ExternalIdentity(ExternalIdentityMixin, Base):
26 26 @declared_attr
27 27 def access_token(self):
28 return sa.Column(EncryptedUnicode(255), default="")
28 return sa.Column(EncryptedUnicode(255), default='')
29 29
30 30 @declared_attr
31 31 def alt_token(self):
32 return sa.Column(EncryptedUnicode(255), default="")
32 return sa.Column(EncryptedUnicode(255), default='')
33 33
34 34 @declared_attr
35 35 def token_secret(self):
36 return sa.Column(EncryptedUnicode(255), default="")
36 return sa.Column(EncryptedUnicode(255), default='')
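
The declared_attr decorators above are what let mixins like ExternalIdentityMixin contribute columns: each decorated method runs once per mapped class and returns a fresh Column. A minimal sketch of the pattern, with plain Unicode standing in for AppEnlight's EncryptedUnicode type:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declared_attr

    class TokenColumnsMixin:
        @declared_attr
        def access_token(self):
            # evaluated once per mapped subclass, so each class
            # gets its own independent Column object
            return sa.Column(sa.Unicode(255), default='')
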
NO CONTENT: 128 more modified files and 4 removed files omitted; the requested commit or file is too big and content was truncated. Show full diff.
General Comments 2
Under Review
author

Auto status change to "Under Review"

Rejected

Please use https://github.com/Appenlight/appenlight to contribute :) Thanks!