Pull request !2235 (Closed), created on Sat, 13 Oct 2018 19:27:30. Commits:
  • ini: added new key
  • registration: add a way to disable registration
  • Update register.jinja2
  • alert_channels: it should also be a pkey
  • alert_channels: allow binding to resources
(13 additional commits not shown.)

The requested changes are too big and content was truncated.

@@ -0,0 +1,7 b''
1 [bumpversion]
2 current_version = 1.1.0
3 message = release: Bump version {current_version} to {new_version}
4 tag_name = {new_version}
5
6 [bumpversion:file:backend/src/appenlight/VERSION]
7
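For context, the `{current_version}`/`{new_version}` placeholders in this config are expanded with Python's str.format-style substitution by the bumpversion tool. A minimal sketch of that expansion (illustration only; the `new_version` value below is a hypothetical next release, not taken from this PR):

    # Sketch of the placeholder expansion .bumpversion.cfg relies on.
    # "new_version" is assumed for illustration; the real value comes from
    # the bumpversion CLI when it bumps a version part.
    current_version = "1.1.0"
    new_version = "1.2.0"  # hypothetical next version

    message = "release: Bump version {current_version} to {new_version}".format(
        current_version=current_version, new_version=new_version
    )
    tag_name = "{new_version}".format(new_version=new_version)

    print(message)   # release: Bump version 1.1.0 to 1.2.0
    print(tag_name)  # 1.2.0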
@@ -0,0 +1,161 b''
1 # Created by .ignore support plugin (hsz.mobi)
2 syntax: glob
3
4 ### Example user template template
5 ### Example user template
6
7 .venv
8 # IntelliJ project files
9 .idea
10 *.iml
11 out
12 gen### Python template
13 # Byte-compiled / optimized / DLL files
14 __pycache__/
15 *.py[cod]
16 *$py.class
17
18 # C extensions
19 *.so
20
21 # Distribution / packaging
22 .Python
23 env/
24 build/
25 develop-eggs/
26 dist/
27 downloads/
28 eggs/
29 .eggs/
30 lib/
31 lib64/
32 parts/
33 sdist/
34 var/
35 *.egg-info/
36 .installed.cfg
37 *.egg
38
39 # PyInstaller
40 # Usually these files are written by a python script from a template
41 # before PyInstaller builds the exe, so as to inject date/other infos into it.
42 *.manifest
43 *.spec
44
45 # Installer logs
46 pip-log.txt
47 pip-delete-this-directory.txt
48
49 # Unit test / coverage reports
50 htmlcov/
51 .tox/
52 .coverage
53 .coverage.*
54 .cache
55 nosetests.xml
56 coverage.xml
57 *,cover
58 .hypothesis/
59
60 # Translations
61 *.mo
62 *.pot
63
64 # Mac stuff:
65 *.DS_Store
66
67 # Django stuff:
68 *.log
69 local_settings.py
70
71 # Flask instance folder
72 instance/
73
74 # Scrapy stuff:
75 .scrapy
76
77 # Sphinx documentation
78 docs/_build/
79
80 # PyBuilder
81 target/
82
83 # IPython Notebook
84 .ipynb_checkpoints
85
86 # pyenv
87 .python-version
88
89 # celery beat schedule file
90 celerybeat-schedule
91
92 # dotenv
93 .env
94
95 # virtualenv
96 venv/
97 ENV/
98
99 # Spyder project settings
100 .spyderproject
101
102 # Rope project settings
103 .ropeproject
104
105
106 syntax: regexp
107 ^\.idea$
108 syntax: regexp
109 ^\.settings$
110 syntax: regexp
111 ^data$
112 syntax: regexp
113 ^webassets$
114 syntax: regexp
115 ^dist$
116 syntax: regexp
117 ^\.project$
118 syntax: regexp
119 ^\.pydevproject$
120 syntax: regexp
121 ^private$
122 syntax: regexp
123 ^appenlight_frontend/build$
124 syntax: regexp
125 ^appenlight_frontend/bower_components$
126 syntax: regexp
127 ^appenlight_frontend/node_modules$
128 ^src/node_modules$
129 syntax: regexp
130 ^\.pydevproject$
131 syntax: regexp
132 appenlight\.egg-info$
133 syntax: regexp
134 \.pyc$
135 syntax: regexp
136 \celerybeat.*
137 syntax: regexp
138 \.iml$
139 syntax: regexp
140 ^frontend/build$
141 syntax: regexp
142 ^frontend/bower_components$
143 syntax: regexp
144 ^frontend/node_modules$
145 ^frontend/src/node_modules$
146 ^frontend/build$
147
148 syntax: regexp
149 \.db$
150
151 syntax: regexp
152 packer_cache
153
154 syntax: regexp
155 packer/packer
156
157 syntax: regexp
158 install_appenlight_production.yaml
159 ^docs/result$
160 ^docs/Appenlight-docs/_build$
161 ^docs/www$
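This ignore file mixes `syntax: glob` and `syntax: regexp` sections; Mercurial matches each regexp pattern against repository-relative paths with unanchored search semantics, so `^...$` pins a pattern to an exact path. A small sketch of that matching (the sample paths are made-up examples):

    # Sketch of hgignore "syntax: regexp" matching: Python regexes are
    # searched (not fully matched) against repo-relative paths.
    import re

    patterns = [r"^\.idea$", r"\.pyc$", r"^frontend/node_modules$"]
    sample_paths = [".idea", "src/app.pyc", "frontend/node_modules", "docs/idea"]

    for path in sample_paths:
        hits = [p for p in patterns if re.search(p, path)]
        print(path, "->", hits or "not ignored")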
@@ -0,0 +1,16 b''
1 .. :changelog:
2
3 History
4 -------
5
6 0.9.1 (2016-XX-XX)
7 ++++++++++++++++++
8
9 * Added support for "NOT" operator in rule engine
10 * Various bugfixes
11
12
13 0.9.0 (2016-06-29)
14 ++++++++++++++++++
15
16 * first tagged public release
@@ -0,0 +1,3 b''
1 [pip2nix]
2 requirements = ., -r ./requirements.txt
3 output = ./python-packages.nix
This diff has been collapsed as it changes many lines (1,082 lines changed).
@@ -0,0 +1,1082 b''
1 {
2 Jinja2 = super.buildPythonPackage {
3 name = "Jinja2-2.8";
4 buildInputs = with self; [];
5 doCheck = false;
6 propagatedBuildInputs = with self; [MarkupSafe];
7 src = fetchurl {
8 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
9 md5 = "edb51693fe22c53cee5403775c71a99e";
10 };
11 meta = {
12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 };
14 };
15 Mako = super.buildPythonPackage {
16 name = "Mako-1.0.4";
17 buildInputs = with self; [];
18 doCheck = false;
19 propagatedBuildInputs = with self; [MarkupSafe];
20 src = fetchurl {
21 url = "https://pypi.python.org/packages/7a/ae/925434246ee90b42e8ef57d3b30a0ab7caf9a2de3e449b876c56dcb48155/Mako-1.0.4.tar.gz";
22 md5 = "c5fc31a323dd4990683d2f2da02d4e20";
23 };
24 meta = {
25 license = [ pkgs.lib.licenses.mit ];
26 };
27 };
28 MarkupSafe = super.buildPythonPackage {
29 name = "MarkupSafe-0.23";
30 buildInputs = with self; [];
31 doCheck = false;
32 propagatedBuildInputs = with self; [];
33 src = fetchurl {
34 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
35 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
36 };
37 meta = {
38 license = [ pkgs.lib.licenses.bsdOriginal ];
39 };
40 };
41 PasteDeploy = super.buildPythonPackage {
42 name = "PasteDeploy-1.5.2";
43 buildInputs = with self; [];
44 doCheck = false;
45 propagatedBuildInputs = with self; [];
46 src = fetchurl {
47 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
48 md5 = "352b7205c78c8de4987578d19431af3b";
49 };
50 meta = {
51 license = [ pkgs.lib.licenses.mit ];
52 };
53 };
54 SQLAlchemy = super.buildPythonPackage {
55 name = "SQLAlchemy-1.0.12";
56 buildInputs = with self; [];
57 doCheck = false;
58 propagatedBuildInputs = with self; [];
59 src = fetchurl {
60 url = "https://pypi.python.org/packages/5c/52/9b48cd58eac58cae2a27923ff34c783f390b95413ff65669a86e98f80829/SQLAlchemy-1.0.12.tar.gz";
61 md5 = "6d19ef29883bbebdcac6613cf391cac4";
62 };
63 meta = {
64 license = [ pkgs.lib.licenses.mit ];
65 };
66 };
67 WebOb = super.buildPythonPackage {
68 name = "WebOb-1.6.1";
69 buildInputs = with self; [];
70 doCheck = false;
71 propagatedBuildInputs = with self; [];
72 src = fetchurl {
73 url = "https://pypi.python.org/packages/5d/c7/7c1565b188d8d32bf3657a24b9d71621e35ba20ec4179a0a7f9803511099/WebOb-1.6.1.tar.gz";
74 md5 = "04049d82e9d12dd91f6f46f54cc826aa";
75 };
76 meta = {
77 license = [ pkgs.lib.licenses.mit ];
78 };
79 };
80 alembic = super.buildPythonPackage {
81 name = "alembic-0.8.6";
82 buildInputs = with self; [];
83 doCheck = false;
84 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
85 src = fetchurl {
86 url = "https://pypi.python.org/packages/d2/c3/fdb752aa39832d056aeac958f35f1fb9fb9397a52bdab9248adcbd9f17d9/alembic-0.8.6.tar.gz";
87 md5 = "6517b160e576cedf9b7625a18a9bc594";
88 };
89 meta = {
90 license = [ pkgs.lib.licenses.mit ];
91 };
92 };
93 amqp = super.buildPythonPackage {
94 name = "amqp-1.4.9";
95 buildInputs = with self; [];
96 doCheck = false;
97 propagatedBuildInputs = with self; [];
98 src = fetchurl {
99 url = "https://pypi.python.org/packages/cc/a4/f265c6f9a7eb1dd45d36d9ab775520e07ff575b11ad21156f9866da047b2/amqp-1.4.9.tar.gz";
100 md5 = "df57dde763ba2dea25b3fa92dfe43c19";
101 };
102 meta = {
103 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
104 };
105 };
106 anyjson = super.buildPythonPackage {
107 name = "anyjson-0.3.3";
108 buildInputs = with self; [];
109 doCheck = false;
110 propagatedBuildInputs = with self; [];
111 src = fetchurl {
112 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
113 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
114 };
115 meta = {
116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 };
118 };
119 appenlight = super.buildPythonPackage {
120 name = "appenlight-0.9.0";
121 buildInputs = with self; [];
122 doCheck = false;
123 propagatedBuildInputs = with self; [repoze.sendmail pyramid pyramid-tm pyramid-debugtoolbar pyramid-authstack SQLAlchemy alembic webhelpers2 transaction zope.sqlalchemy pyramid-mailer redis redlock-py pyramid-jinja2 psycopg2 wtforms celery formencode psutil ziggurat-foundations bcrypt appenlight-client markdown colander defusedxml dogpile.cache pyramid-redis-sessions simplejson waitress gunicorn requests requests-oauthlib gevent gevent-websocket pygments lxml paginate paginate-sqlalchemy pyelasticsearch six mock itsdangerous camplight jira python-dateutil authomatic cryptography webassets];
124 src = ./.;
125 meta = {
126 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
127 };
128 };
129 appenlight-client = super.buildPythonPackage {
130 name = "appenlight-client-0.6.17";
131 buildInputs = with self; [];
132 doCheck = false;
133 propagatedBuildInputs = with self; [WebOb requests six];
134 src = fetchurl {
135 url = "https://pypi.python.org/packages/af/86/1075f162d6534080f7f6ed9d8a83254e8f0be90c0a3e7ead9feffbe4423f/appenlight_client-0.6.17.tar.gz";
136 md5 = "2f4d8229ce2dba607a9077210857e0e5";
137 };
138 meta = {
139 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
140 };
141 };
142 authomatic = super.buildPythonPackage {
143 name = "authomatic-0.1.0.post1";
144 buildInputs = with self; [];
145 doCheck = false;
146 propagatedBuildInputs = with self; [];
147 src = fetchurl {
148 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
149 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
150 };
151 meta = {
152 license = [ pkgs.lib.licenses.mit ];
153 };
154 };
155 bcrypt = super.buildPythonPackage {
156 name = "bcrypt-2.0.0";
157 buildInputs = with self; [];
158 doCheck = false;
159 propagatedBuildInputs = with self; [cffi six];
160 src = fetchurl {
161 url = "https://pypi.python.org/packages/11/7d/4c7980d04314466de42ea804db71995c9b3a2a47dc79a63c51f1be0cfd50/bcrypt-2.0.0.tar.gz";
162 md5 = "e7fb17be46904cdb2ae6a062859ee58c";
163 };
164 meta = {
165 license = [ pkgs.lib.licenses.asl20 ];
166 };
167 };
168 billiard = super.buildPythonPackage {
169 name = "billiard-3.3.0.23";
170 buildInputs = with self; [];
171 doCheck = false;
172 propagatedBuildInputs = with self; [];
173 src = fetchurl {
174 url = "https://pypi.python.org/packages/64/a6/d7b6fb7bd0a4680a41f1d4b27061c7b768c673070ba8ac116f865de4e7ca/billiard-3.3.0.23.tar.gz";
175 md5 = "6ee416e1e7c8d8164ce29d7377cca6a4";
176 };
177 meta = {
178 license = [ pkgs.lib.licenses.bsdOriginal ];
179 };
180 };
181 camplight = super.buildPythonPackage {
182 name = "camplight-0.9.6";
183 buildInputs = with self; [];
184 doCheck = false;
185 propagatedBuildInputs = with self; [requests];
186 src = fetchurl {
187 url = "https://pypi.python.org/packages/60/df/bed89a1f1d06632b192eff09a8fa75f85e0080ff70229c8145fbc3b2afa8/camplight-0.9.6.tar.gz";
188 md5 = "716cc7a4ea30da34ae4fcbfe2784ce59";
189 };
190 meta = {
191 license = [ pkgs.lib.licenses.mit ];
192 };
193 };
194 celery = super.buildPythonPackage {
195 name = "celery-3.1.23";
196 buildInputs = with self; [];
197 doCheck = false;
198 propagatedBuildInputs = with self; [pytz billiard kombu];
199 src = fetchurl {
200 url = "https://pypi.python.org/packages/ea/a6/6da0bac3ea8abbc2763fd2664af2955702f97f140f2d7277069445532b7c/celery-3.1.23.tar.gz";
201 md5 = "c6f10f956a49424d553ab1391ab39ab2";
202 };
203 meta = {
204 license = [ pkgs.lib.licenses.bsdOriginal ];
205 };
206 };
207 certifi = super.buildPythonPackage {
208 name = "certifi-2016.8.31";
209 buildInputs = with self; [];
210 doCheck = false;
211 propagatedBuildInputs = with self; [];
212 src = fetchurl {
213 url = "https://pypi.python.org/packages/1c/d1/0133a5084f0d17db0270c6061e824a11b0e417d743f5ff4c594f4090ed89/certifi-2016.8.31.tar.gz";
214 md5 = "2f22d484a36d38d98be74f9eeb2846ec";
215 };
216 meta = {
217 license = [ pkgs.lib.licenses.isc ];
218 };
219 };
220 cffi = super.buildPythonPackage {
221 name = "cffi-1.8.2";
222 buildInputs = with self; [];
223 doCheck = false;
224 propagatedBuildInputs = with self; [pycparser];
225 src = fetchurl {
226 url = "https://pypi.python.org/packages/b8/21/9d6f08d2d36a0a8c84623646b4ed5a07023d868823361a086b021fb21172/cffi-1.8.2.tar.gz";
227 md5 = "538f307b6c5169bba41fbfda2b070762";
228 };
229 meta = {
230 license = [ pkgs.lib.licenses.mit ];
231 };
232 };
233 colander = super.buildPythonPackage {
234 name = "colander-1.2";
235 buildInputs = with self; [];
236 doCheck = false;
237 propagatedBuildInputs = with self; [translationstring iso8601];
238 src = fetchurl {
239 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
240 md5 = "83db21b07936a0726e588dae1914b9ed";
241 };
242 meta = {
243 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
244 };
245 };
246 cryptography = super.buildPythonPackage {
247 name = "cryptography-1.2.3";
248 buildInputs = with self; [];
249 doCheck = false;
250 propagatedBuildInputs = with self; [idna pyasn1 six setuptools enum34 ipaddress cffi];
251 src = fetchurl {
252 url = "https://pypi.python.org/packages/8b/7d/9df253f059c8d9a9389f06df5d6301b0725a44dbf055a1f7aff8e455746a/cryptography-1.2.3.tar.gz";
253 md5 = "5474d2b3e8c7555a60852e48d2743f85";
254 };
255 meta = {
256 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
257 };
258 };
259 defusedxml = super.buildPythonPackage {
260 name = "defusedxml-0.4.1";
261 buildInputs = with self; [];
262 doCheck = false;
263 propagatedBuildInputs = with self; [];
264 src = fetchurl {
265 url = "https://pypi.python.org/packages/09/3b/b1afa9649f48517d027e99413fec54f387f648c90156b3cf6451c8cd45f9/defusedxml-0.4.1.tar.gz";
266 md5 = "230a5eff64f878b392478e30376d673a";
267 };
268 meta = {
269 license = [ pkgs.lib.licenses.psfl ];
270 };
271 };
272 dogpile.cache = super.buildPythonPackage {
273 name = "dogpile.cache-0.5.7";
274 buildInputs = with self; [];
275 doCheck = false;
276 propagatedBuildInputs = with self; [dogpile.core];
277 src = fetchurl {
278 url = "https://pypi.python.org/packages/07/74/2a83bedf758156d9c95d112691bbad870d3b77ccbcfb781b4ef836ea7d96/dogpile.cache-0.5.7.tar.gz";
279 md5 = "3e58ce41af574aab41d78e9c4190f194";
280 };
281 meta = {
282 license = [ pkgs.lib.licenses.bsdOriginal ];
283 };
284 };
285 dogpile.core = super.buildPythonPackage {
286 name = "dogpile.core-0.4.1";
287 buildInputs = with self; [];
288 doCheck = false;
289 propagatedBuildInputs = with self; [];
290 src = fetchurl {
291 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
292 md5 = "01cb19f52bba3e95c9b560f39341f045";
293 };
294 meta = {
295 license = [ pkgs.lib.licenses.bsdOriginal ];
296 };
297 };
298 elasticsearch = super.buildPythonPackage {
299 name = "elasticsearch-1.9.0";
300 buildInputs = with self; [];
301 doCheck = false;
302 propagatedBuildInputs = with self; [urllib3];
303 src = fetchurl {
304 url = "https://pypi.python.org/packages/13/9b/540e311b31a10c2a904acfb08030c656047e5c7ba479d35df2799e5dccfe/elasticsearch-1.9.0.tar.gz";
305 md5 = "3550390baea1639479f79758d66ab032";
306 };
307 meta = {
308 license = [ pkgs.lib.licenses.asl20 ];
309 };
310 };
311 enum34 = super.buildPythonPackage {
312 name = "enum34-1.1.6";
313 buildInputs = with self; [];
314 doCheck = false;
315 propagatedBuildInputs = with self; [];
316 src = fetchurl {
317 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
318 md5 = "5f13a0841a61f7fc295c514490d120d0";
319 };
320 meta = {
321 license = [ pkgs.lib.licenses.bsdOriginal ];
322 };
323 };
324 formencode = super.buildPythonPackage {
325 name = "formencode-1.3.0";
326 buildInputs = with self; [];
327 doCheck = false;
328 propagatedBuildInputs = with self; [];
329 src = fetchurl {
330 url = "https://pypi.python.org/packages/99/5b/f71f36b81b42291a70f61104d0eeb1a30be856a052ebe032c37b45db840c/FormEncode-1.3.0.zip";
331 md5 = "6df12d60bf3179402f2c2efd1129eb74";
332 };
333 meta = {
334 license = [ pkgs.lib.licenses.psfl ];
335 };
336 };
337 gevent = super.buildPythonPackage {
338 name = "gevent-1.1.1";
339 buildInputs = with self; [];
340 doCheck = false;
341 propagatedBuildInputs = with self; [greenlet];
342 src = fetchurl {
343 url = "https://pypi.python.org/packages/12/dc/0b2e57823225de86f6e111a65d212c9e3b64847dddaa19691a6cb94b0b2e/gevent-1.1.1.tar.gz";
344 md5 = "1532f5396ab4d07a231f1935483be7c3";
345 };
346 meta = {
347 license = [ pkgs.lib.licenses.mit ];
348 };
349 };
350 gevent-websocket = super.buildPythonPackage {
351 name = "gevent-websocket-0.9.5";
352 buildInputs = with self; [];
353 doCheck = false;
354 propagatedBuildInputs = with self; [gevent];
355 src = fetchurl {
356 url = "https://pypi.python.org/packages/de/93/6bc86ddd65435a56a2f2ea7cc908d92fea894fc08e364156656e71cc1435/gevent-websocket-0.9.5.tar.gz";
357 md5 = "03a8473b9a61426b0ef6094319141389";
358 };
359 meta = {
360 license = [ { fullName = "Copyright 2011-2013 Jeffrey Gelens <jeffrey@noppo.pro>"; } pkgs.lib.licenses.asl20 ];
361 };
362 };
363 greenlet = super.buildPythonPackage {
364 name = "greenlet-0.4.10";
365 buildInputs = with self; [];
366 doCheck = false;
367 propagatedBuildInputs = with self; [];
368 src = fetchurl {
369 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
370 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
371 };
372 meta = {
373 license = [ pkgs.lib.licenses.mit ];
374 };
375 };
376 gunicorn = super.buildPythonPackage {
377 name = "gunicorn-19.4.5";
378 buildInputs = with self; [];
379 doCheck = false;
380 propagatedBuildInputs = with self; [];
381 src = fetchurl {
382 url = "https://pypi.python.org/packages/1e/67/95248e17050822ab436c8a43dbfc0625a8545775737e33b66508cffad278/gunicorn-19.4.5.tar.gz";
383 md5 = "ce45c2dccba58784694dd77f23d9a677";
384 };
385 meta = {
386 license = [ pkgs.lib.licenses.mit ];
387 };
388 };
389 idna = super.buildPythonPackage {
390 name = "idna-2.1";
391 buildInputs = with self; [];
392 doCheck = false;
393 propagatedBuildInputs = with self; [];
394 src = fetchurl {
395 url = "https://pypi.python.org/packages/fb/84/8c27516fbaa8147acd2e431086b473c453c428e24e8fb99a1d89ce381851/idna-2.1.tar.gz";
396 md5 = "f6473caa9c5e0cc1ad3fd5d04c3c114b";
397 };
398 meta = {
399 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
400 };
401 };
402 ipaddress = super.buildPythonPackage {
403 name = "ipaddress-1.0.17";
404 buildInputs = with self; [];
405 doCheck = false;
406 propagatedBuildInputs = with self; [];
407 src = fetchurl {
408 url = "https://pypi.python.org/packages/bb/26/3b64955ff73f9e3155079b9ed31812afdfa5333b5c76387454d651ef593a/ipaddress-1.0.17.tar.gz";
409 md5 = "8bbf0326719fafb1f453921ef96729fe";
410 };
411 meta = {
412 license = [ pkgs.lib.licenses.psfl ];
413 };
414 };
415 iso8601 = super.buildPythonPackage {
416 name = "iso8601-0.1.11";
417 buildInputs = with self; [];
418 doCheck = false;
419 propagatedBuildInputs = with self; [];
420 src = fetchurl {
421 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
422 md5 = "b06d11cd14a64096f907086044f0fe38";
423 };
424 meta = {
425 license = [ pkgs.lib.licenses.mit ];
426 };
427 };
428 itsdangerous = super.buildPythonPackage {
429 name = "itsdangerous-0.24";
430 buildInputs = with self; [];
431 doCheck = false;
432 propagatedBuildInputs = with self; [];
433 src = fetchurl {
434 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
435 md5 = "a3d55aa79369aef5345c036a8a26307f";
436 };
437 meta = {
438 license = [ pkgs.lib.licenses.bsdOriginal ];
439 };
440 };
441 jira = super.buildPythonPackage {
442 name = "jira-1.0.7";
443 buildInputs = with self; [];
444 doCheck = false;
445 propagatedBuildInputs = with self; [requests requests-oauthlib tlslite six requests-toolbelt];
446 src = fetchurl {
447 url = "https://pypi.python.org/packages/4e/36/4f0ab121c3510fce29743c31e2f47e99c2be68ee4441ad395366489351b0/jira-1.0.7.tar.gz";
448 md5 = "cb1d3f1e1b7a388932ad5d961bf2c56d";
449 };
450 meta = {
451 license = [ pkgs.lib.licenses.bsdOriginal ];
452 };
453 };
454 kombu = super.buildPythonPackage {
455 name = "kombu-3.0.35";
456 buildInputs = with self; [];
457 doCheck = false;
458 propagatedBuildInputs = with self; [anyjson amqp];
459 src = fetchurl {
460 url = "https://pypi.python.org/packages/5f/4f/3859b52f6d465d0d4a767516c924ee4f0e1387498ac8d0c30d9942da3762/kombu-3.0.35.tar.gz";
461 md5 = "6483ac8ba7109ec606f5cb9bd084b6ef";
462 };
463 meta = {
464 license = [ pkgs.lib.licenses.bsdOriginal ];
465 };
466 };
467 lxml = super.buildPythonPackage {
468 name = "lxml-3.6.0";
469 buildInputs = with self; [];
470 doCheck = false;
471 propagatedBuildInputs = with self; [];
472 src = fetchurl {
473 url = "https://pypi.python.org/packages/11/1b/fe6904151b37a0d6da6e60c13583945f8ce3eae8ebd0ec763ce546358947/lxml-3.6.0.tar.gz";
474 md5 = "5957cc384bd6e83934be35c057ec03b6";
475 };
476 meta = {
477 license = [ pkgs.lib.licenses.bsdOriginal ];
478 };
479 };
480 markdown = super.buildPythonPackage {
481 name = "markdown-2.5";
482 buildInputs = with self; [];
483 doCheck = false;
484 propagatedBuildInputs = with self; [];
485 src = fetchurl {
486 url = "https://pypi.python.org/packages/16/7f/034572fbc66f76a626156c9500349f5b384ca1f38194318ddde32bc2fcb0/Markdown-2.5.zip";
487 md5 = "053e5614f7efc06ac0fcd6954678096c";
488 };
489 meta = {
490 license = [ pkgs.lib.licenses.bsdOriginal ];
491 };
492 };
493 mock = super.buildPythonPackage {
494 name = "mock-1.0.1";
495 buildInputs = with self; [];
496 doCheck = false;
497 propagatedBuildInputs = with self; [];
498 src = fetchurl {
499 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
500 md5 = "869f08d003c289a97c1a6610faf5e913";
501 };
502 meta = {
503 license = [ pkgs.lib.licenses.bsdOriginal ];
504 };
505 };
506 oauthlib = super.buildPythonPackage {
507 name = "oauthlib-2.0.0";
508 buildInputs = with self; [];
509 doCheck = false;
510 propagatedBuildInputs = with self; [];
511 src = fetchurl {
512 url = "https://pypi.python.org/packages/ce/92/7f07412a4f04e55c1e83a09c6fd48075b5df96c1dbd4078c3407c5be1dff/oauthlib-2.0.0.tar.gz";
513 md5 = "79b83aa677fc45d1ea28deab7445b4ca";
514 };
515 meta = {
516 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved"; } ];
517 };
518 };
519 paginate = super.buildPythonPackage {
520 name = "paginate-0.5.4";
521 buildInputs = with self; [];
522 doCheck = false;
523 propagatedBuildInputs = with self; [];
524 src = fetchurl {
525 url = "https://pypi.python.org/packages/52/2e/2c3a5647d3f7583355743d73841d03c8b50b97983a478a8f82d3cb9f4a5f/paginate-0.5.4.tar.gz";
526 md5 = "91fdb133f85ac73c6616feba38976e95";
527 };
528 meta = {
529 license = [ pkgs.lib.licenses.mit ];
530 };
531 };
532 paginate-sqlalchemy = super.buildPythonPackage {
533 name = "paginate-sqlalchemy-0.2.0";
534 buildInputs = with self; [];
535 doCheck = false;
536 propagatedBuildInputs = with self; [SQLAlchemy paginate];
537 src = fetchurl {
538 url = "https://pypi.python.org/packages/25/64/fe572514615971fc235e95798ae0e2ee3beeccf43272c623a0a6b082d2d6/paginate_sqlalchemy-0.2.0.tar.gz";
539 md5 = "4ca097c4132f43cd72c6a1795b6bbb5d";
540 };
541 meta = {
542 license = [ pkgs.lib.licenses.mit ];
543 };
544 };
545 passlib = super.buildPythonPackage {
546 name = "passlib-1.6.5";
547 buildInputs = with self; [];
548 doCheck = false;
549 propagatedBuildInputs = with self; [];
550 src = fetchurl {
551 url = "https://pypi.python.org/packages/1e/59/d1a50836b29c87a1bde9442e1846aa11e1548491cbee719e51b45a623e75/passlib-1.6.5.tar.gz";
552 md5 = "d2edd6c42cde136a538b48d90a06ad67";
553 };
554 meta = {
555 license = [ pkgs.lib.licenses.bsdOriginal ];
556 };
557 };
558 psutil = super.buildPythonPackage {
559 name = "psutil-2.1.2";
560 buildInputs = with self; [];
561 doCheck = false;
562 propagatedBuildInputs = with self; [];
563 src = fetchurl {
564 url = "https://pypi.python.org/packages/53/6a/8051b913b2f94eb00fd045fe9e14a7182b6e7f088b12c308edd7616a559b/psutil-2.1.2.tar.gz";
565 md5 = "1969c9b3e256f5ce8fb90c5d0124233e";
566 };
567 meta = {
568 license = [ pkgs.lib.licenses.bsdOriginal ];
569 };
570 };
571 psycopg2 = super.buildPythonPackage {
572 name = "psycopg2-2.6.1";
573 buildInputs = with self; [];
574 doCheck = false;
575 propagatedBuildInputs = with self; [];
576 src = fetchurl {
577 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
578 md5 = "842b44f8c95517ed5b792081a2370da1";
579 };
580 meta = {
581 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
582 };
583 };
584 pyasn1 = super.buildPythonPackage {
585 name = "pyasn1-0.1.9";
586 buildInputs = with self; [];
587 doCheck = false;
588 propagatedBuildInputs = with self; [];
589 src = fetchurl {
590 url = "https://pypi.python.org/packages/f7/83/377e3dd2e95f9020dbd0dfd3c47aaa7deebe3c68d3857a4e51917146ae8b/pyasn1-0.1.9.tar.gz";
591 md5 = "f00a02a631d4016818659d1cc38d229a";
592 };
593 meta = {
594 license = [ pkgs.lib.licenses.bsdOriginal ];
595 };
596 };
597 pycparser = super.buildPythonPackage {
598 name = "pycparser-2.14";
599 buildInputs = with self; [];
600 doCheck = false;
601 propagatedBuildInputs = with self; [];
602 src = fetchurl {
603 url = "https://pypi.python.org/packages/6d/31/666614af3db0acf377876d48688c5d334b6e493b96d21aa7d332169bee50/pycparser-2.14.tar.gz";
604 md5 = "a2bc8d28c923b4fe2b2c3b4b51a4f935";
605 };
606 meta = {
607 license = [ pkgs.lib.licenses.bsdOriginal ];
608 };
609 };
610 pyelasticsearch = super.buildPythonPackage {
611 name = "pyelasticsearch-1.4";
612 buildInputs = with self; [];
613 doCheck = false;
614 propagatedBuildInputs = with self; [certifi elasticsearch urllib3 simplejson six];
615 src = fetchurl {
616 url = "https://pypi.python.org/packages/2f/3a/7643cfcfc4cbdbb20ada800bbd54ac9705d0c047d7b8f8d5eeeb3047b4eb/pyelasticsearch-1.4.tar.gz";
617 md5 = "ed61ebb7b253364e55b4923d11e17049";
618 };
619 meta = {
620 license = [ pkgs.lib.licenses.bsdOriginal ];
621 };
622 };
623 pygments = super.buildPythonPackage {
624 name = "pygments-2.1.3";
625 buildInputs = with self; [];
626 doCheck = false;
627 propagatedBuildInputs = with self; [];
628 src = fetchurl {
629 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
630 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
631 };
632 meta = {
633 license = [ pkgs.lib.licenses.bsdOriginal ];
634 };
635 };
636 pyramid = super.buildPythonPackage {
637 name = "pyramid-1.7.3";
638 buildInputs = with self; [];
639 doCheck = false;
640 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
641 src = fetchurl {
642 url = "https://pypi.python.org/packages/9c/6d/9b9f9acf22c5d221f25cf6756645bce9ea54ee741466197674fe77f2eee3/pyramid-1.7.3.tar.gz";
643 md5 = "5f154c8c352ef013e6e412be02bbb576";
644 };
645 meta = {
646 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
647 };
648 };
649 pyramid-authstack = super.buildPythonPackage {
650 name = "pyramid-authstack-1.0.1";
651 buildInputs = with self; [];
652 doCheck = false;
653 propagatedBuildInputs = with self; [pyramid zope.interface];
654 src = fetchurl {
655 url = "https://pypi.python.org/packages/01/4b/e84cb8fda19f0f03f96231195fd074212b9291f732aa07f90edcfb21ff34/pyramid_authstack-1.0.1.tar.gz";
656 md5 = "8e199862b5a5cd6385f7d5209cee2f12";
657 };
658 meta = {
659 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
660 };
661 };
662 pyramid-debugtoolbar = super.buildPythonPackage {
663 name = "pyramid-debugtoolbar-3.0.4";
664 buildInputs = with self; [];
665 doCheck = false;
666 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru pygments];
667 src = fetchurl {
668 url = "https://pypi.python.org/packages/b0/c5/aae5d99983600146875d471aab9142b925fd3596e6e637f6c35d158d09cc/pyramid_debugtoolbar-3.0.4.tar.gz";
669 md5 = "51ff68a733ae994641027f10116e519d";
670 };
671 meta = {
672 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
673 };
674 };
675 pyramid-jinja2 = super.buildPythonPackage {
676 name = "pyramid-jinja2-2.6.2";
677 buildInputs = with self; [];
678 doCheck = false;
679 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
680 src = fetchurl {
681 url = "https://pypi.python.org/packages/37/00/ac38702305dcf08fe1f1d6d882e8e2d957543bc96c62de52d99d43433c23/pyramid_jinja2-2.6.2.tar.gz";
682 md5 = "10ca075934ebf8f52acfc9898991966d";
683 };
684 meta = {
685 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
686 };
687 };
688 pyramid-mailer = super.buildPythonPackage {
689 name = "pyramid-mailer-0.14.1";
690 buildInputs = with self; [];
691 doCheck = false;
692 propagatedBuildInputs = with self; [pyramid repoze.sendmail];
693 src = fetchurl {
694 url = "https://pypi.python.org/packages/43/02/a32823750dbdee4280090843d5788cc550ab6f24f23fcabbeb7f912bf5fe/pyramid_mailer-0.14.1.tar.gz";
695 md5 = "a589801afdc4a3d64337e4cbd2fc7cdb";
696 };
697 meta = {
698 license = [ pkgs.lib.licenses.bsdOriginal ];
699 };
700 };
701 pyramid-mako = super.buildPythonPackage {
702 name = "pyramid-mako-1.0.2";
703 buildInputs = with self; [];
704 doCheck = false;
705 propagatedBuildInputs = with self; [pyramid Mako];
706 src = fetchurl {
707 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
708 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
709 };
710 meta = {
711 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
712 };
713 };
714 pyramid-redis-sessions = super.buildPythonPackage {
715 name = "pyramid-redis-sessions-1.0.1";
716 buildInputs = with self; [];
717 doCheck = false;
718 propagatedBuildInputs = with self; [redis pyramid];
719 src = fetchurl {
720 url = "https://pypi.python.org/packages/45/9b/905fd70bb603b61819d525efe7626342ad5f8d033e25fbaedbc53f458c37/pyramid_redis_sessions-1.0.1.tar.gz";
721 md5 = "a39bbfd36f61685eac32d5f4010d3fef";
722 };
723 meta = {
724 license = [ { fullName = "FreeBSD"; } ];
725 };
726 };
727 pyramid-tm = super.buildPythonPackage {
728 name = "pyramid-tm-0.12";
729 buildInputs = with self; [];
730 doCheck = false;
731 propagatedBuildInputs = with self; [pyramid transaction];
732 src = fetchurl {
733 url = "https://pypi.python.org/packages/3e/0b/a0fd3856c8ca2b30f20fcd26627b9cf9d91cd2cfabae42aee3441b2441c5/pyramid_tm-0.12.tar.gz";
734 md5 = "6e5f4449706855fdb7c63d2190e0209b";
735 };
736 meta = {
737 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
738 };
739 };
740 python-dateutil = super.buildPythonPackage {
741 name = "python-dateutil-2.5.3";
742 buildInputs = with self; [];
743 doCheck = false;
744 propagatedBuildInputs = with self; [six];
745 src = fetchurl {
746 url = "https://pypi.python.org/packages/3e/f5/aad82824b369332a676a90a8c0d1e608b17e740bbb6aeeebca726f17b902/python-dateutil-2.5.3.tar.gz";
747 md5 = "05ffc6d2cc85a7fd93bb245807f715ef";
748 };
749 meta = {
750 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "Simplified BSD"; } ];
751 };
752 };
753 python-editor = super.buildPythonPackage {
754 name = "python-editor-1.0.1";
755 buildInputs = with self; [];
756 doCheck = false;
757 propagatedBuildInputs = with self; [];
758 src = fetchurl {
759 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
760 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
761 };
762 meta = {
763 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
764 };
765 };
766 pytz = super.buildPythonPackage {
767 name = "pytz-2016.6.1";
768 buildInputs = with self; [];
769 doCheck = false;
770 propagatedBuildInputs = with self; [];
771 src = fetchurl {
772 url = "https://pypi.python.org/packages/5d/8e/6635d8f3f9f48c03bb925fab543383089858271f9cfd1216b83247e8df94/pytz-2016.6.1.tar.gz";
773 md5 = "b6c28a3b968bc1d8badfb61b93874e03";
774 };
775 meta = {
776 license = [ pkgs.lib.licenses.mit ];
777 };
778 };
779 redis = super.buildPythonPackage {
780 name = "redis-2.10.5";
781 buildInputs = with self; [];
782 doCheck = false;
783 propagatedBuildInputs = with self; [];
784 src = fetchurl {
785 url = "https://pypi.python.org/packages/68/44/5efe9e98ad83ef5b742ce62a15bea609ed5a0d1caf35b79257ddb324031a/redis-2.10.5.tar.gz";
786 md5 = "3b26c2b9703b4b56b30a1ad508e31083";
787 };
788 meta = {
789 license = [ pkgs.lib.licenses.mit ];
790 };
791 };
792 redlock-py = super.buildPythonPackage {
793 name = "redlock-py-1.0.8";
794 buildInputs = with self; [];
795 doCheck = false;
796 propagatedBuildInputs = with self; [redis];
797 src = fetchurl {
798 url = "https://pypi.python.org/packages/7c/40/29e1730f771b5d27e3c77b5426b6a67a3642868bf8bd592dfa6639feda98/redlock-py-1.0.8.tar.gz";
799 md5 = "7f8fe8ddefbe35deaa64d67ebdf1c58e";
800 };
801 meta = {
802 license = [ pkgs.lib.licenses.mit ];
803 };
804 };
805 repoze.lru = super.buildPythonPackage {
806 name = "repoze.lru-0.6";
807 buildInputs = with self; [];
808 doCheck = false;
809 propagatedBuildInputs = with self; [];
810 src = fetchurl {
811 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
812 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
813 };
814 meta = {
815 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
816 };
817 };
818 repoze.sendmail = super.buildPythonPackage {
819 name = "repoze.sendmail-4.1";
820 buildInputs = with self; [];
821 doCheck = false;
822 propagatedBuildInputs = with self; [setuptools zope.interface transaction];
823 src = fetchurl {
824 url = "https://pypi.python.org/packages/6b/3a/501a897c036c7b728b02a2695998055755e9e71c7e135abdcf200958965e/repoze.sendmail-4.1.tar.gz";
825 md5 = "81d15f1f03cc67d6f56f2091c594ef57";
826 };
827 meta = {
828 license = [ pkgs.lib.licenses.zpt21 ];
829 };
830 };
831 requests = super.buildPythonPackage {
832 name = "requests-2.9.1";
833 buildInputs = with self; [];
834 doCheck = false;
835 propagatedBuildInputs = with self; [];
836 src = fetchurl {
837 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
838 md5 = "0b7f480d19012ec52bab78292efd976d";
839 };
840 meta = {
841 license = [ pkgs.lib.licenses.asl20 ];
842 };
843 };
844 requests-oauthlib = super.buildPythonPackage {
845 name = "requests-oauthlib-0.6.1";
846 buildInputs = with self; [];
847 doCheck = false;
848 propagatedBuildInputs = with self; [oauthlib requests];
849 src = fetchurl {
850 url = "https://pypi.python.org/packages/f9/98/a1aaae4bbcde0e98d6d853c4f08bd52f20b0005cefb881679bcdf7ea7a00/requests-oauthlib-0.6.1.tar.gz";
851 md5 = "f159bc7675ebe6a2d76798f4c00c5bf8";
852 };
853 meta = {
854 license = [ pkgs.lib.licenses.isc pkgs.lib.licenses.bsdOriginal ];
855 };
856 };
857 requests-toolbelt = super.buildPythonPackage {
858 name = "requests-toolbelt-0.7.0";
859 buildInputs = with self; [];
860 doCheck = false;
861 propagatedBuildInputs = with self; [requests];
862 src = fetchurl {
863 url = "https://pypi.python.org/packages/59/78/1d391d30ebf74079a8e4de6ab66fdca5362903ef2df64496f4697e9bb626/requests-toolbelt-0.7.0.tar.gz";
864 md5 = "bfe2009905f460f4764c32cfbbf4205f";
865 };
866 meta = {
867 license = [ pkgs.lib.licenses.asl20 ];
868 };
869 };
870 setuptools = super.buildPythonPackage {
871 name = "setuptools-27.2.0";
872 buildInputs = with self; [];
873 doCheck = false;
874 propagatedBuildInputs = with self; [];
875 src = fetchurl {
876 url = "https://pypi.python.org/packages/87/ba/54197971d107bc06f5f3fbdc0d728a7ae0b10cafca46acfddba65a0899d8/setuptools-27.2.0.tar.gz";
877 md5 = "b39715612fdc0372dbfd7b3fcf5d4fe5";
878 };
879 meta = {
880 license = [ pkgs.lib.licenses.mit ];
881 };
882 };
883 simplejson = super.buildPythonPackage {
884 name = "simplejson-3.8.2";
885 buildInputs = with self; [];
886 doCheck = false;
887 propagatedBuildInputs = with self; [];
888 src = fetchurl {
889 url = "https://pypi.python.org/packages/f0/07/26b519e6ebb03c2a74989f7571e6ae6b82e9d7d81b8de6fcdbfc643c7b58/simplejson-3.8.2.tar.gz";
890 md5 = "53b1371bbf883b129a12d594a97e9a18";
891 };
892 meta = {
893 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
894 };
895 };
896 six = super.buildPythonPackage {
897 name = "six-1.9.0";
898 buildInputs = with self; [];
899 doCheck = false;
900 propagatedBuildInputs = with self; [];
901 src = fetchurl {
902 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
903 md5 = "476881ef4012262dfc8adc645ee786c4";
904 };
905 meta = {
906 license = [ pkgs.lib.licenses.mit ];
907 };
908 };
909 tlslite = super.buildPythonPackage {
910 name = "tlslite-0.4.9";
911 buildInputs = with self; [];
912 doCheck = false;
913 propagatedBuildInputs = with self; [];
914 src = fetchurl {
915 url = "https://pypi.python.org/packages/92/2b/7904cf913d9bf150b3e408a92c9cb5ce0b97a9ec19f998af48bf4c607f0e/tlslite-0.4.9.tar.gz";
916 md5 = "9f3b3797f595dd66cd36a65c83a87869";
917 };
918 meta = {
919 license = [ { fullName = "public domain and BSD"; } ];
920 };
921 };
922 transaction = super.buildPythonPackage {
923 name = "transaction-1.4.3";
924 buildInputs = with self; [];
925 doCheck = false;
926 propagatedBuildInputs = with self; [zope.interface];
927 src = fetchurl {
928 url = "https://pypi.python.org/packages/9d/9d/afb5c4904fb41edc14029744ff030ac0596846262bda6145edf23791c880/transaction-1.4.3.tar.gz";
929 md5 = "b4ca5983c9e3a0808ff5ff7648092c76";
930 };
931 meta = {
932 license = [ pkgs.lib.licenses.zpt21 ];
933 };
934 };
935 translationstring = super.buildPythonPackage {
936 name = "translationstring-1.3";
937 buildInputs = with self; [];
938 doCheck = false;
939 propagatedBuildInputs = with self; [];
940 src = fetchurl {
941 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
942 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
943 };
944 meta = {
945 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
946 };
947 };
948 urllib3 = super.buildPythonPackage {
949 name = "urllib3-1.17";
950 buildInputs = with self; [];
951 doCheck = false;
952 propagatedBuildInputs = with self; [];
953 src = fetchurl {
954 url = "https://pypi.python.org/packages/c2/79/8851583070bac203561d21b9478340535893f587759608156aaca60a615a/urllib3-1.17.tar.gz";
955 md5 = "12d5520f0fffed0e65cb66b5bdc6ddec";
956 };
957 meta = {
958 license = [ pkgs.lib.licenses.mit ];
959 };
960 };
961 venusian = super.buildPythonPackage {
962 name = "venusian-1.0";
963 buildInputs = with self; [];
964 doCheck = false;
965 propagatedBuildInputs = with self; [];
966 src = fetchurl {
967 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
968 md5 = "dccf2eafb7113759d60c86faf5538756";
969 };
970 meta = {
971 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
972 };
973 };
974 waitress = super.buildPythonPackage {
975 name = "waitress-1.0.0";
976 buildInputs = with self; [];
977 doCheck = false;
978 propagatedBuildInputs = with self; [];
979 src = fetchurl {
980 url = "https://pypi.python.org/packages/a5/c3/264a56b2470de29f35dda8369886663303c8a2294673b2e6b9975e59f471/waitress-1.0.0.tar.gz";
981 md5 = "b900c4d793e218d77742f47ece58dd43";
982 };
983 meta = {
984 license = [ pkgs.lib.licenses.zpt21 ];
985 };
986 };
987 webassets = super.buildPythonPackage {
988 name = "webassets-0.11.1";
989 buildInputs = with self; [];
990 doCheck = false;
991 propagatedBuildInputs = with self; [];
992 src = fetchurl {
993 url = "https://pypi.python.org/packages/0e/97/f0cd013a3ae074672e9fdfa8629e4071b5cc420a2c82bef5622a87631d1c/webassets-0.11.1.tar.gz";
994 md5 = "6acca51bd12fbdc0399ab1a9b67a1599";
995 };
996 meta = {
997 license = [ pkgs.lib.licenses.bsdOriginal ];
998 };
999 };
1000 webhelpers2 = super.buildPythonPackage {
1001 name = "webhelpers2-2.0";
1002 buildInputs = with self; [];
1003 doCheck = false;
1004 propagatedBuildInputs = with self; [MarkupSafe six];
1005 src = fetchurl {
1006 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
1007 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
1008 };
1009 meta = {
1010 license = [ pkgs.lib.licenses.mit ];
1011 };
1012 };
1013 wtforms = super.buildPythonPackage {
1014 name = "wtforms-2.1";
1015 buildInputs = with self; [];
1016 doCheck = false;
1017 propagatedBuildInputs = with self; [];
1018 src = fetchurl {
1019 url = "https://pypi.python.org/packages/bf/91/2e553b86c55e9cf2f33265de50e052441fb753af46f5f20477fe9c61280e/WTForms-2.1.zip";
1020 md5 = "6938a541fafd1a1ae2f6b9b88588eef2";
1021 };
1022 meta = {
1023 license = [ pkgs.lib.licenses.bsdOriginal ];
1024 };
1025 };
1026 ziggurat-foundations = super.buildPythonPackage {
1027 name = "ziggurat-foundations-0.6.8";
1028 buildInputs = with self; [];
1029 doCheck = false;
1030 propagatedBuildInputs = with self; [SQLAlchemy passlib paginate paginate-sqlalchemy alembic six];
1031 src = fetchurl {
1032 url = "https://pypi.python.org/packages/b2/3c/f9a0112a30424a58fccdd357338b4559fdda9e1bb3c9611b1ad263abf49e/ziggurat_foundations-0.6.8.tar.gz";
1033 md5 = "d2cc7201667b0e01099456a77726179c";
1034 };
1035 meta = {
1036 license = [ pkgs.lib.licenses.bsdOriginal ];
1037 };
1038 };
1039 zope.deprecation = super.buildPythonPackage {
1040 name = "zope.deprecation-4.1.2";
1041 buildInputs = with self; [];
1042 doCheck = false;
1043 propagatedBuildInputs = with self; [setuptools];
1044 src = fetchurl {
1045 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1046 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1047 };
1048 meta = {
1049 license = [ pkgs.lib.licenses.zpt21 ];
1050 };
1051 };
1052 zope.interface = super.buildPythonPackage {
1053 name = "zope.interface-4.3.2";
1054 buildInputs = with self; [];
1055 doCheck = false;
1056 propagatedBuildInputs = with self; [setuptools];
1057 src = fetchurl {
1058 url = "https://pypi.python.org/packages/38/1b/d55c39f2cf442bd9fb2c59760ed058c84b57d25c680819c25f3aff741e1f/zope.interface-4.3.2.tar.gz";
1059 md5 = "5f7e15a5bcdfa3c6c0e93ffe45caf87c";
1060 };
1061 meta = {
1062 license = [ pkgs.lib.licenses.zpt21 ];
1063 };
1064 };
1065 zope.sqlalchemy = super.buildPythonPackage {
1066 name = "zope.sqlalchemy-0.7.6";
1067 buildInputs = with self; [];
1068 doCheck = false;
1069 propagatedBuildInputs = with self; [setuptools SQLAlchemy transaction zope.interface];
1070 src = fetchurl {
1071 url = "https://pypi.python.org/packages/d0/e0/5df0d7f9f1955e2e2edecbb1367cf1fa76bc2f84d700661ffd4161c7e2e9/zope.sqlalchemy-0.7.6.zip";
1072 md5 = "0f5bf14679951e59007e090b6922688c";
1073 };
1074 meta = {
1075 license = [ pkgs.lib.licenses.zpt21 ];
1076 };
1077 };
1078
1079 ### Test requirements
1080
1081
1082 }
@@ -0,0 +1,1 b''
1 1.1.0
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
@@ -126,8 +126,8 b' dist/'
126 downloads/
126 downloads/
127 eggs/
127 eggs/
128 .eggs/
128 .eggs/
129 /lib/
129 lib/
130 /lib64/
130 lib64/
131 parts/
131 parts/
132 sdist/
132 sdist/
133 var/
133 var/
@@ -1,3 +1,8 b''
1 This program is free software: you can redistribute it and/or modify
2 it under the terms of the GNU Affero General Public License, version 3
3 (only), as published by the Free Software Foundation.
4
5
1 This program incorporates work covered by the following copyright and
6 This program incorporates work covered by the following copyright and
2 permission notice:
7 permission notice:
3
8
@@ -1,9 +1,99 b''
1 # AppEnlight
1 AppEnlight
2 -----------
2
3
3 Performance, exception, and uptime monitoring for the Web
4 Automatic Installation
5 ======================
4
6
5 ![AppEnlight image](https://raw.githubusercontent.com/AppEnlight/appenlight/gh-pages/static/appenlight.png)
7 Use the Ansible scripts in the `automation` repository to build a complete instance of the application.
8 You can also use `packer` files in `automation/packer` to create whole VMs for KVM and VMware.
6
9
7 Visit:
10 Manual Installation
11 ===================
8
12
9 [Readme moved to backend directory](backend/README.md)
13 To run the app you need to meet the following prerequisites:
14
15 - python 3.5+
16 - running elasticsearch (2.3+/2.4 tested)
17 - running postgresql (9.5+ required)
18 - running redis
19
20 Install the app by running:
21
22 pip install -r requirements.txt
23
24 python setup.py develop
25
26 Install the appenlight uptime plugin (the `ae_uptime_ce` package from the `appenlight-uptime-ce` repository).
27
28 After installing the application you need to perform the following steps:
29
30 1. (optional) generate production.ini (or use a copy of development.ini)
31
32
33 appenlight-make-config production.ini
34
35 2. Setup database structure:
36
37
38 appenlight-migratedb -c FILENAME.ini
39
40 3. Configure elasticsearch:
41
42
43 appenlight-reindex-elasticsearch -t all -c FILENAME.ini
44
45 4. Create base database objects
46
47 (run this command with the help flag to see how to create an administrator user)
48
49
50 appenlight-initializedb -c FILENAME.ini
51
52 5. Generate static assets
53
54
55 appenlight-static -c FILENAME.ini
56
57 Running application
58 ===================
59
60 To run the main app:
61
62 pserve development.ini
63
64 To run celery workers:
65
66 celery worker -A appenlight.celery -Q "reports,logs,metrics,default" --ini FILENAME.ini
67
68 To run celery beat:
69
70 celery beat -A appenlight.celery --ini FILENAME.ini
71
72 To run appenlight's uptime plugin:
73
74 appenlight-uptime-monitor -c FILENAME.ini
75
76 Real-time Notifications
77 =======================
78
79 You should also run the `channelstream` websocket server for real-time notifications:
80
81 channelstream -i filename.ini
82
83 Testing
84 =======
85
86 To run test suite:
87
88 py.test appenlight/tests/tests.py --cov appenlight (this looks for testing.ini in the repo root)
89
90
91 Development
92 ===========
93
94 To develop appenlight frontend:
95
96 cd frontend
97 npm install
98 bower install
99 grunt watch
@@ -1,2 +1,2 b''
1 include *.txt *.ini *.cfg *.rst *.md VERSION
1 include *.txt *.ini *.cfg *.rst *.md
2 recursive-include src *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml *.jinja2 *.rst *.otf *.ttf *.svg *.woff *.woff2 *.eot
2 recursive-include appenlight *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml *.jinja2 *.rst *.otf *.ttf *.svg *.woff *.eot
@@ -1,47 +1,49 b''
1 repoze.sendmail==4.4.1
1 repoze.sendmail==4.1
2 pyramid==1.10.2
2 pyramid==1.7.3
3 pyramid_tm==2.2.1
3 pyramid_tm==0.12
4 pyramid_debugtoolbar
4 pyramid_debugtoolbar
5 pyramid_authstack==1.0.1
5 pyramid_authstack==1.0.1
6 SQLAlchemy==1.2.18
6 SQLAlchemy==1.0.12
7 alembic==1.0.8
7 alembic==0.8.6
8 webhelpers2==2.0
8 webhelpers2==2.0
9 transaction==2.4.0
9 transaction==1.4.3
10 zope.sqlalchemy==1.1
10 zope.sqlalchemy==0.7.6
11 pyramid_mailer==0.15.1
11 pyramid_mailer==0.14.1
12 redis==3.2.1
12 redis==2.10.5
13 redlock-py==1.0.8
13 redlock-py==1.0.8
14 pyramid_jinja2==2.8
14 pyramid_jinja2==2.6.2
15 psycopg2-binary==2.7.7
15 psycopg2==2.6.1
16 wtforms==2.2.1
16 wtforms==2.1
17 celery==4.2.1
17 celery==3.1.23
18 formencode==1.3.1
18 formencode==1.3.0
19 psutil==5.6.1
19 psutil==2.1.2
20 ziggurat_foundations==0.8.3
20 ziggurat_foundations>=0.6.7
21 bcrypt==3.1.6
21 bcrypt==2.0.0
22 appenlight_client
22 appenlight_client
23 markdown==3.0.1
23 markdown==2.5
24 colander==1.7
24 colander==1.2
25 defusedxml==0.5.0
25 defusedxml==0.4.1
26 dogpile.cache==0.7.1
26 dogpile.cache==0.5.7
27 pyramid_redis_sessions==1.0.1
27 pyramid_redis_sessions==1.0.1
28 simplejson==3.16.0
28 simplejson==3.8.2
29 waitress==1.2.1
29 waitress==1.0
30 gunicorn==19.9.0
30 gunicorn==19.4.5
31 uwsgi==2.0.18
31 requests==2.9.1
32 requests==2.21.0
32 requests_oauthlib==0.6.1
33 requests_oauthlib==1.2.0
33 gevent==1.1.1
34 gevent==1.4.0
34 gevent-websocket==0.9.5
35 pygments==2.3.1
35 pygments==2.1.3
36 lxml==4.3.2
36 lxml==3.6.0
37 paginate==0.5.6
37 paginate==0.5.4
38 paginate-sqlalchemy==0.3.0
38 paginate-sqlalchemy==0.2.0
39 elasticsearch>=6.0.0,<7.0.0
39 pyelasticsearch==1.4
40 six==1.9.0
40 mock==1.0.1
41 mock==1.0.1
41 itsdangerous==1.1.0
42 itsdangerous==0.24
42 camplight==0.9.6
43 camplight==0.9.6
43 jira==1.0.7
44 jira==1.0.7
44 python-dateutil==2.5.3
45 python-dateutil==2.5.3
45 authomatic==0.1.0.post1
46 authomatic==0.1.0.post1
46 cryptography==2.6.1
47 cryptography==1.2.3
48 webassets==0.11.1
47
49
@@ -1,99 +1,77 b''
1 import os
1 import os
2 import sys
2 import re
3 import re
3
4
4 from setuptools import setup, find_packages
5 from setuptools import setup, find_packages
5
6
6 here = os.path.abspath(os.path.dirname(__file__))
7 here = os.path.abspath(os.path.dirname(__file__))
7 README = open(os.path.join(here, "README.md")).read()
8 README = open(os.path.join(here, '..', 'README.md')).read()
8 CHANGES = open(os.path.join(here, "CHANGELOG.md")).read()
9 CHANGES = open(os.path.join(here, 'CHANGELOG.rst')).read()
9
10
10 REQUIREMENTS = open(os.path.join(here, "requirements.txt")).readlines()
11 REQUIREMENTS = open(os.path.join(here, 'requirements.txt')).readlines()
11
12
12 compiled = re.compile("([^=><]*).*")
13 compiled = re.compile('([^=><]*).*')
13
14
14
15
15 def parse_req(req):
16 def parse_req(req):
16 return compiled.search(req).group(1).strip()
17 return compiled.search(req).group(1).strip()
17
18
18
19
19 if "APPENLIGHT_DEVELOP" in os.environ:
20 requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]
20 requires = [_f for _f in map(parse_req, REQUIREMENTS) if _f]
21 else:
22 requires = REQUIREMENTS
23
21
24
22
25 def _get_meta_var(name, data, callback_handler=None):
23 def _get_meta_var(name, data, callback_handler=None):
26 import re
24 import re
27
25 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
28 matches = re.compile(r"(?:%s)\s*=\s*(.*)" % name).search(data)
29 if matches:
26 if matches:
30 if not callable(callback_handler):
27 if not callable(callback_handler):
31 callback_handler = lambda v: v
28 callback_handler = lambda v: v
32
29
33 return callback_handler(eval(matches.groups()[0]))
30 return callback_handler(eval(matches.groups()[0]))
34
31
35
32 with open(os.path.join(here, 'src', 'appenlight', '__init__.py'), 'r') as _meta:
36 with open(os.path.join(here, "src", "appenlight", "__init__.py"), "r") as _meta:
37 _metadata = _meta.read()
33 _metadata = _meta.read()
38
34
39 __license__ = _get_meta_var("__license__", _metadata)
35 with open(os.path.join('src', 'appenlight', 'VERSION')) as _meta_version:
40 __author__ = _get_meta_var("__author__", _metadata)
36 __version__ = _meta_version.read().strip()
41 __url__ = _get_meta_var("__url__", _metadata)
37
42
38 __license__ = _get_meta_var('__license__', _metadata)
43 found_packages = find_packages("src")
39 __author__ = _get_meta_var('__author__', _metadata)
44 found_packages.append("appenlight.migrations")
40 __url__ = _get_meta_var('__url__', _metadata)
45 found_packages.append("appenlight.migrations.versions")
41
46 setup(
42 found_packages = find_packages('src')
47 name="appenlight",
43 found_packages.append('appenlight.migrations.versions')
48 description="appenlight",
44 setup(name='appenlight',
49 long_description=README,
45 description='appenlight',
50 classifiers=[
46 long_description=README + '\n\n' + CHANGES,
51 "Framework :: Pyramid",
47 classifiers=[
52 "License :: OSI Approved :: Apache Software License",
48 "Programming Language :: Python",
53 "Programming Language :: Python",
49 "Framework :: Pylons",
54 "Programming Language :: Python :: 3 :: Only",
50 "Topic :: Internet :: WWW/HTTP",
55 "Programming Language :: Python :: 3.6",
51 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
56 "Topic :: System :: Monitoring",
52 ],
57 "Topic :: Software Development",
53 version=__version__,
58 "Topic :: Software Development :: Bug Tracking",
54 license=__license__,
59 "Topic :: Internet :: Log Analysis",
55 author=__author__,
60 "Topic :: Internet :: WWW/HTTP",
56 url=__url__,
61 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
57 keywords='web wsgi bfg pylons pyramid',
62 ],
58 package_dir={'': 'src'},
63 version="2.0.0rc1",
59 packages=found_packages,
64 license=__license__,
60 include_package_data=True,
65 author=__author__,
61 zip_safe=False,
66 url="https://github.com/AppEnlight/appenlight",
62 test_suite='appenlight',
67 keywords="web wsgi bfg pylons pyramid flask django monitoring apm instrumentation appenlight",
63 install_requires=requires,
68 python_requires=">=3.5",
64 entry_points={
69 long_description_content_type="text/markdown",
65 'paste.app_factory': [
70 package_dir={"": "src"},
66 'main = appenlight:main'
71 packages=found_packages,
67 ],
72 include_package_data=True,
68 'console_scripts': [
73 zip_safe=False,
69 'appenlight-cleanup = appenlight.scripts.cleanup:main',
74 test_suite="appenlight",
70 'appenlight-initializedb = appenlight.scripts.initialize_db:main',
75 install_requires=requires,
71 'appenlight-migratedb = appenlight.scripts.migratedb:main',
76 extras_require={
72 'appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main',
77 "dev": [
73 'appenlight-static = appenlight.scripts.static:main',
78 "coverage",
74 'appenlight-make-config = appenlight.scripts.make_config:main',
79 "pytest",
75 ]
80 "pyramid",
76 }
81 "tox",
77 )
82 "mock",
83 "pytest-mock",
84 "webtest",
85 ],
86 "lint": ["black"],
87 },
88 entry_points={
89 "paste.app_factory": ["main = appenlight:main"],
90 "console_scripts": [
91 "appenlight-cleanup = appenlight.scripts.cleanup:main",
92 "appenlight-initializedb = appenlight.scripts.initialize_db:main",
93 "appenlight-migratedb = appenlight.scripts.migratedb:main",
94 "appenlight-reindex-elasticsearch = appenlight.scripts.reindex_elasticsearch:main",
95 "appenlight-static = appenlight.scripts.static:main",
96 "appenlight-make-config = appenlight.scripts.make_config:main",
97 ],
98 },
99 )
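The `parse_req` helper in the new setup.py strips version specifiers so that requirements.txt pins can double as bare `install_requires` names. A standalone sketch of that parsing:

    # Standalone sketch of setup.py's requirement-name parsing: everything
    # up to the first '=', '>' or '<' is treated as the package name.
    import re

    compiled = re.compile("([^=><]*).*")

    def parse_req(req):
        return compiled.search(req).group(1).strip()

    print(parse_req("SQLAlchemy==1.2.18"))           # SQLAlchemy
    print(parse_req("elasticsearch>=6.0.0,<7.0.0"))  # elasticsearch
    print(parse_req("pyramid_debugtoolbar"))         # pyramid_debugtoolbar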
@@ -16,10 +16,9 b''
16
16
17 import datetime
17 import datetime
18 import logging
18 import logging
19 from elasticsearch import Elasticsearch
19 import pyelasticsearch
20 import redis
20 import redis
21 import os
21 import os
22 import pkg_resources
23 from pkg_resources import iter_entry_points
22 from pkg_resources import iter_entry_points
24
23
25 import appenlight.lib.jinja2_filters as jinja2_filters
24 import appenlight.lib.jinja2_filters as jinja2_filters
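This hunk replaces the abandoned pyelasticsearch client with the official elasticsearch-py package (pinned to >=6.0.0,<7.0.0 in requirements.txt above). A minimal sketch of constructing the new client, assuming the elasticsearch-py 6.x API (the host URL is a placeholder, not taken from the PR):

    # Minimal sketch of the new client; the URL below is a placeholder.
    from elasticsearch import Elasticsearch

    es = Elasticsearch(["http://127.0.0.1:9200"])
    print(es.info())  # round-trips basic cluster metadata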
@@ -28,7 +27,7 b' import appenlight.lib.encryption as encryption'
28 from pyramid.config import PHASE3_CONFIG
27 from pyramid.config import PHASE3_CONFIG
29 from pyramid.authentication import AuthTktAuthenticationPolicy
28 from pyramid.authentication import AuthTktAuthenticationPolicy
30 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid_mailer.interfaces import IMailer
30 from pyramid_mailer.mailer import Mailer
32 from pyramid.renderers import JSON
31 from pyramid.renderers import JSON
33 from pyramid_redis_sessions import session_factory_from_settings
32 from pyramid_redis_sessions import session_factory_from_settings
34 from pyramid.settings import asbool, aslist
33 from pyramid.settings import asbool, aslist
@@ -38,18 +37,15 b' from redlock import Redlock'
38 from sqlalchemy import engine_from_config
37 from sqlalchemy import engine_from_config
39
38
40 from appenlight.celery import configure_celery
39 from appenlight.celery import configure_celery
41 from appenlight.lib.configurator import (
40 from appenlight.lib.configurator import (CythonCompatConfigurator,
42 CythonCompatConfigurator,
41 register_appenlight_plugin)
43 register_appenlight_plugin,
44 )
45 from appenlight.lib import cache_regions
42 from appenlight.lib import cache_regions
46 from appenlight.lib.ext_json import json
43 from appenlight.lib.ext_json import json
47 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
44 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
48
45
49 __license__ = "Apache 2.0"
46 __license__ = 'Apache 2.0'
50 __author__ = "RhodeCode GmbH"
47 __author__ = 'RhodeCode GmbH'
51 __url__ = "http://rhodecode.com"
48 __url__ = 'http://rhodecode.com'
52 __version__ = pkg_resources.get_distribution("appenlight").parsed_version
53
49
54 json_renderer = JSON(serializer=json.dumps, indent=4)
50 json_renderer = JSON(serializer=json.dumps, indent=4)
55
51
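Worth noting: the new module no longer hard-codes `__version__`, it asks the installed distribution for it. A small sketch of what that call yields (printed values are illustrative):

    import pkg_resources

    dist = pkg_resources.get_distribution("appenlight")
    print(dist.version)         # installed version string, e.g. "2.0.0rc1"
    print(dist.parsed_version)  # parsed, comparable Version object
    # parsed versions order correctly, e.g. 2.0.0rc1 < 2.0.0
    assert dist.parsed_version < pkg_resources.parse_version("99.0")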
@@ -61,7 +57,7 b' def datetime_adapter(obj, request):'
61
57
62
58
63 def all_permissions_adapter(obj, request):
59 def all_permissions_adapter(obj, request):
64 return "__all_permissions__"
60 return '__all_permissions__'
65
61
66
62
67 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
63 json_renderer.add_adapter(datetime.datetime, datetime_adapter)
@@ -72,167 +68,139 b' def main(global_config, **settings):'
72 """ This function returns a Pyramid WSGI application.
68 """ This function returns a Pyramid WSGI application.
73 """
69 """
74 auth_tkt_policy = AuthTktAuthenticationPolicy(
70 auth_tkt_policy = AuthTktAuthenticationPolicy(
75 settings["authtkt.secret"],
71 settings['authtkt.secret'],
76 hashalg="sha512",
72 hashalg='sha512',
77 callback=groupfinder,
73 callback=groupfinder,
78 max_age=2592000,
74 max_age=2592000,
79 secure=asbool(settings.get("authtkt.secure", "false")),
75 secure=asbool(settings.get('authtkt.secure', 'false')))
76 auth_token_policy = AuthTokenAuthenticationPolicy(
77 callback=groupfinder
80 )
78 )
81 auth_token_policy = AuthTokenAuthenticationPolicy(callback=groupfinder)
82 authorization_policy = ACLAuthorizationPolicy()
79 authorization_policy = ACLAuthorizationPolicy()
83 authentication_policy = AuthenticationStackPolicy()
80 authentication_policy = AuthenticationStackPolicy()
84 authentication_policy.add_policy("auth_tkt", auth_tkt_policy)
81 authentication_policy.add_policy('auth_tkt', auth_tkt_policy)
85 authentication_policy.add_policy("auth_token", auth_token_policy)
82 authentication_policy.add_policy('auth_token', auth_token_policy)
86 # set crypto key
83 # set crypto key
87 encryption.ENCRYPTION_SECRET = settings.get("encryption_secret")
84 encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
88 # import this later so encryption key can be monkeypatched
85 # import this later so encryption key can be monkeypatched
89 from appenlight.models import DBSession, register_datastores
86 from appenlight.models import DBSession, register_datastores
90
87
91 # registration
88 # registration
92 settings["appenlight.disable_registration"] = asbool(
89 settings['appenlight.disable_registration'] = asbool(
93 settings.get("appenlight.disable_registration")
90 settings.get('appenlight.disable_registration'))
94 )
95
91
96 # update config with cometd info
92 # update config with cometd info
97 settings["cometd_servers"] = {
93 settings['cometd_servers'] = {'server': settings['cometd.server'],
98 "server": settings["cometd.server"],
94 'secret': settings['cometd.secret']}
99 "secret": settings["cometd.secret"],
100 }
101
95
102 # Create the Pyramid Configurator.
96 # Create the Pyramid Configurator.
103 settings["_mail_url"] = settings["mailing.app_url"]
97 settings['_mail_url'] = settings['mailing.app_url']
104 config = CythonCompatConfigurator(
98 config = CythonCompatConfigurator(
105 settings=settings,
99 settings=settings,
106 authentication_policy=authentication_policy,
100 authentication_policy=authentication_policy,
107 authorization_policy=authorization_policy,
101 authorization_policy=authorization_policy,
108 root_factory="appenlight.security.RootFactory",
102 root_factory='appenlight.security.RootFactory',
109 default_permission="view",
103 default_permission='view')
110 )
111 # custom registry variables
104 # custom registry variables
112
105
113 # resource type information
106 # resource type information
114 config.registry.resource_types = ["resource", "application"]
107 config.registry.resource_types = ['resource', 'application']
115 # plugin information
108 # plugin information
116 config.registry.appenlight_plugins = {}
109 config.registry.appenlight_plugins = {}
117
110
118 config.set_default_csrf_options(require_csrf=True, header="X-XSRF-TOKEN")
111 config.set_default_csrf_options(require_csrf=True, header='X-XSRF-TOKEN')
119 config.add_view_deriver("appenlight.predicates.csrf_view", name="csrf_view")
112 config.add_view_deriver('appenlight.predicates.csrf_view',
113 name='csrf_view')
120
114
121 # later, when config is available
115 # later, when config is available
122 dogpile_config = {
116 dogpile_config = {'url': settings['redis.url'],
123 "url": settings["redis.url"],
117 "redis_expiration_time": 86400,
124 "redis_expiration_time": 86400,
118 "redis_distributed_lock": True}
125 "redis_distributed_lock": True,
126 }
127 cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
119 cache_regions.regions = cache_regions.CacheRegions(dogpile_config)
128 config.registry.cache_regions = cache_regions.regions
120 config.registry.cache_regions = cache_regions.regions
129 engine = engine_from_config(settings, "sqlalchemy.", json_serializer=json.dumps)
121 engine = engine_from_config(settings, 'sqlalchemy.',
122 json_serializer=json.dumps)
130 DBSession.configure(bind=engine)
123 DBSession.configure(bind=engine)
131
124
132 # json renderer that serializes datetime
125 # json renderer that serializes datetime
133 config.add_renderer("json", json_renderer)
126 config.add_renderer('json', json_renderer)
134 config.add_request_method(
127 config.set_request_property('appenlight.lib.request.es_conn', 'es_conn')
135 "appenlight.lib.request.es_conn", "es_conn", property=True
128 config.set_request_property('appenlight.lib.request.get_user', 'user',
136 )
129 reify=True)
137 config.add_request_method(
130 config.set_request_property('appenlight.lib.request.get_csrf_token',
138 "appenlight.lib.request.get_user", "user", reify=True, property=True
131 'csrf_token', reify=True)
139 )
132 config.set_request_property('appenlight.lib.request.safe_json_body',
140 config.add_request_method(
133 'safe_json_body', reify=True)
141 "appenlight.lib.request.get_csrf_token", "csrf_token", reify=True, property=True
134 config.set_request_property('appenlight.lib.request.unsafe_json_body',
142 )
135 'unsafe_json_body', reify=True)
143 config.add_request_method(
136 config.add_request_method('appenlight.lib.request.add_flash_to_headers',
144 "appenlight.lib.request.safe_json_body",
137 'add_flash_to_headers')
145 "safe_json_body",
138 config.add_request_method('appenlight.lib.request.get_authomatic',
146 reify=True,
139 'authomatic', reify=True)
147 property=True,
140
148 )
141 config.include('pyramid_redis_sessions')
149 config.add_request_method(
142 config.include('pyramid_tm')
150 "appenlight.lib.request.unsafe_json_body",
143 config.include('pyramid_jinja2')
151 "unsafe_json_body",
144 config.include('appenlight_client.ext.pyramid_tween')
152 reify=True,
145 config.include('ziggurat_foundations.ext.pyramid.sign_in')
153 property=True,
146 es_server_list = aslist(settings['elasticsearch.nodes'])
154 )
147 redis_url = settings['redis.url']
155 config.add_request_method(
148 log.warning('Elasticsearch server list: {}'.format(es_server_list))
156 "appenlight.lib.request.add_flash_to_headers", "add_flash_to_headers"
149 log.warning('Redis server: {}'.format(redis_url))
157 )
150 config.registry.es_conn = pyelasticsearch.ElasticSearch(es_server_list)
158 config.add_request_method(
159 "appenlight.lib.request.get_authomatic", "authomatic", reify=True
160 )
161
162 config.include("pyramid_redis_sessions")
163 config.include("pyramid_tm")
164 config.include("pyramid_jinja2")
165 config.include("pyramid_mailer")
166 config.include("appenlight_client.ext.pyramid_tween")
167 config.include("ziggurat_foundations.ext.pyramid.sign_in")
168 es_server_list = aslist(settings["elasticsearch.nodes"])
169 redis_url = settings["redis.url"]
170 log.warning("Elasticsearch server list: {}".format(es_server_list))
171 log.warning("Redis server: {}".format(redis_url))
172 config.registry.es_conn = Elasticsearch(es_server_list)
173 config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)
151 config.registry.redis_conn = redis.StrictRedis.from_url(redis_url)
174
152
175 config.registry.redis_lockmgr = Redlock(
153 config.registry.redis_lockmgr = Redlock([settings['redis.redlock.url'], ],
176 [settings["redis.redlock.url"]], retry_count=0, retry_delay=0
154 retry_count=0, retry_delay=0)
177 )
155 # mailer
178 # mailer bw compat
156 config.registry.mailer = Mailer.from_settings(settings)
179 config.registry.mailer = config.registry.getUtility(IMailer)
180
157
181 # Configure sessions
158 # Configure sessions
182 session_factory = session_factory_from_settings(settings)
159 session_factory = session_factory_from_settings(settings)
183 config.set_session_factory(session_factory)
160 config.set_session_factory(session_factory)
184
161
185 # Configure renderers and event subscribers
162 # Configure renderers and event subscribers
186 config.add_jinja2_extension("jinja2.ext.loopcontrols")
163 config.add_jinja2_extension('jinja2.ext.loopcontrols')
187 config.add_jinja2_search_path("appenlight:templates")
164 config.add_jinja2_search_path('appenlight:templates')
188 # event subscribers
165 # event subscribers
189 config.add_subscriber(
166 config.add_subscriber("appenlight.subscribers.application_created",
190 "appenlight.subscribers.application_created",
167 "pyramid.events.ApplicationCreated")
191 "pyramid.events.ApplicationCreated",
168 config.add_subscriber("appenlight.subscribers.add_renderer_globals",
192 )
169 "pyramid.events.BeforeRender")
193 config.add_subscriber(
170 config.add_subscriber('appenlight.subscribers.new_request',
194 "appenlight.subscribers.add_renderer_globals", "pyramid.events.BeforeRender"
171 'pyramid.events.NewRequest')
195 )
172 config.add_view_predicate('context_type_class',
196 config.add_subscriber(
173 'appenlight.predicates.contextTypeClass')
197 "appenlight.subscribers.new_request", "pyramid.events.NewRequest"
174
198 )
175 register_datastores(es_conn=config.registry.es_conn,
199 config.add_view_predicate(
176 redis_conn=config.registry.redis_conn,
200 "context_type_class", "appenlight.predicates.contextTypeClass"
177 redis_lockmgr=config.registry.redis_lockmgr)
201 )
202
203 register_datastores(
204 es_conn=config.registry.es_conn,
205 redis_conn=config.registry.redis_conn,
206 redis_lockmgr=config.registry.redis_lockmgr,
207 )
208
178
209 # base stuff and scan
179 # base stuff and scan
210
180
211 # need to ensure webassets exists otherwise config.override_asset()
181 # need to ensure webassets exists otherwise config.override_asset()
212 # throws exception
182 # throws exception
213 if not os.path.exists(settings["webassets.dir"]):
183 if not os.path.exists(settings['webassets.dir']):
214 os.mkdir(settings["webassets.dir"])
184 os.mkdir(settings['webassets.dir'])
215 config.add_static_view(
185 config.add_static_view(path='appenlight:webassets',
216 path="appenlight:webassets", name="static", cache_max_age=3600
186 name='static', cache_max_age=3600)
217 )
187 config.override_asset(to_override='appenlight:webassets/',
218 config.override_asset(
188 override_with=settings['webassets.dir'])
219 to_override="appenlight:webassets/", override_with=settings["webassets.dir"]
189
220 )
190 config.include('appenlight.views')
221
191 config.include('appenlight.views.admin')
222 config.include("appenlight.views")
192 config.scan(ignore=['appenlight.migrations', 'appenlight.scripts',
223 config.include("appenlight.views.admin")
193 'appenlight.tests'])
224 config.scan(
194
225 ignore=["appenlight.migrations", "appenlight.scripts", "appenlight.tests"]
195 config.add_directive('register_appenlight_plugin',
226 )
196 register_appenlight_plugin)
227
197
228 config.add_directive("register_appenlight_plugin", register_appenlight_plugin)
198 for entry_point in iter_entry_points(group='appenlight.plugins'):
229
230 for entry_point in iter_entry_points(group="appenlight.plugins"):
231 plugin = entry_point.load()
199 plugin = entry_point.load()
232 plugin.includeme(config)
200 plugin.includeme(config)
233
201
234 # include other appenlight plugins explicitly if needed
202 # include other appenlight plugins explicitly if needed
235 includes = aslist(settings.get("appenlight.includes", []))
203 includes = aslist(settings.get('appenlight.includes', []))
236 for inc in includes:
204 for inc in includes:
237 config.include(inc)
205 config.include(inc)
238
206
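Most of the churn in this hunk is mechanical black formatting, but there is a real API migration inside it: `config.set_request_property`, removed in later Pyramid releases, becomes `config.add_request_method(..., property=True)`. A standalone sketch of the new form, using a hypothetical helper:

    from pyramid.config import Configurator

    def client_ip(request):
        # hypothetical lazy request attribute, not from the diff
        return request.environ.get("REMOTE_ADDR")

    config = Configurator()
    # reify=True computes the value once and caches it on the request
    config.add_request_method(client_ip, "client_ip", reify=True)

The hunk also swaps `pyelasticsearch.ElasticSearch(...)` for the official client's `Elasticsearch(...)` and fetches the mailer through the `IMailer` utility that `config.include("pyramid_mailer")` registers.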
@@ -240,8 +208,8 b' def main(global_config, **settings):'
240
208
241 def pre_commit():
209 def pre_commit():
242 jinja_env = config.get_jinja2_environment()
210 jinja_env = config.get_jinja2_environment()
243 jinja_env.filters["tojson"] = json.dumps
211 jinja_env.filters['tojson'] = json.dumps
244 jinja_env.filters["toJSONUnsafe"] = jinja2_filters.toJSONUnsafe
212 jinja_env.filters['toJSONUnsafe'] = jinja2_filters.toJSONUnsafe
245
213
246 config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
214 config.action(None, pre_commit, order=PHASE3_CONFIG + 999)
247
215
@@ -34,23 +34,15 b' from appenlight_client.ext.celery import register_signals'
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37 register(
37 register('date_json', json_dumps, json_loads,
38 "date_json",
38 content_type='application/x-date_json',
39 json_dumps,
39 content_encoding='utf-8')
40 json_loads,
41 content_type="application/x-date_json",
42 content_encoding="utf-8",
43 )
44
40
45 celery = Celery()
41 celery = Celery()
46
42
47 celery.user_options["preload"].add(
43 celery.user_options['preload'].add(
48 Option(
44 Option('--ini', dest='ini', default=None,
49 "--ini",
45 help='Specifies pyramid configuration file location.')
50 dest="ini",
51 default=None,
52 help="Specifies pyramid configuration file location.",
53 )
54 )
46 )
55
47
56
48
@@ -59,21 +51,19 b' def on_preload_parsed(options, **kwargs):'
59 """
51 """
60 This actually configures Celery from the pyramid config file
52 This actually configures Celery from the pyramid config file
61 """
53 """
62 celery.conf["INI_PYRAMID"] = options["ini"]
54 celery.conf['INI_PYRAMID'] = options['ini']
63 import appenlight_client.client as e_client
55 import appenlight_client.client as e_client
64
56 ini_location = options['ini']
65 ini_location = options["ini"]
66 if not ini_location:
57 if not ini_location:
67 raise Exception(
58 raise Exception('You need to pass pyramid ini location using '
68 "You need to pass pyramid ini location using "
59 '--ini=filename.ini argument to the worker')
69 "--ini=filename.ini argument to the worker"
60 env = bootstrap(ini_location)
70 )
61 api_key = env['request'].registry.settings['appenlight.api_key']
71 env = bootstrap(ini_location[0])
62 tr_config = env['request'].registry.settings.get(
72 api_key = env["request"].registry.settings["appenlight.api_key"]
63 'appenlight.transport_config')
73 tr_config = env["request"].registry.settings.get("appenlight.transport_config")
64 CONFIG = e_client.get_config({'appenlight.api_key': api_key})
74 CONFIG = e_client.get_config({"appenlight.api_key": api_key})
75 if tr_config:
65 if tr_config:
76 CONFIG["appenlight.transport_config"] = tr_config
66 CONFIG['appenlight.transport_config'] = tr_config
77 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
67 APPENLIGHT_CLIENT = e_client.Client(CONFIG)
78 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
68 # log.addHandler(APPENLIGHT_CLIENT.log_handler)
79 register_signals(APPENLIGHT_CLIENT)
69 register_signals(APPENLIGHT_CLIENT)
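`bootstrap()` is what hands the worker a fully configured Pyramid registry outside any web request. A minimal sketch of the pattern (the ini path is a placeholder; the worker receives it via `--ini`):

    from pyramid.paster import bootstrap

    env = bootstrap("appenlight.ini")  # placeholder path
    try:
        settings = env["request"].registry.settings
        print(settings.get("appenlight.api_key"))
    finally:
        env["closer"]()  # releases resources acquired during bootstrap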
@@ -81,101 +71,101 b' def on_preload_parsed(options, **kwargs):'
81
71
82
72
83 celery_config = {
73 celery_config = {
84 "CELERY_IMPORTS": ["appenlight.celery.tasks"],
74 'CELERY_IMPORTS': ["appenlight.celery.tasks", ],
85 "CELERYD_TASK_TIME_LIMIT": 60,
75 'CELERYD_TASK_TIME_LIMIT': 60,
86 "CELERYD_MAX_TASKS_PER_CHILD": 1000,
76 'CELERYD_MAX_TASKS_PER_CHILD': 1000,
87 "CELERY_IGNORE_RESULT": True,
77 'CELERY_IGNORE_RESULT': True,
88 "CELERY_ACCEPT_CONTENT": ["date_json"],
78 'CELERY_ACCEPT_CONTENT': ['date_json'],
89 "CELERY_TASK_SERIALIZER": "date_json",
79 'CELERY_TASK_SERIALIZER': 'date_json',
90 "CELERY_RESULT_SERIALIZER": "date_json",
80 'CELERY_RESULT_SERIALIZER': 'date_json',
91 "BROKER_URL": None,
81 'BROKER_URL': None,
92 "CELERYD_CONCURRENCY": None,
82 'CELERYD_CONCURRENCY': None,
93 "CELERY_TIMEZONE": None,
83 'CELERY_TIMEZONE': None,
94 "CELERYBEAT_SCHEDULE": {
84 'CELERYBEAT_SCHEDULE': {
95 "alerting_reports": {
85 'alerting_reports': {
96 "task": "appenlight.celery.tasks.alerting_reports",
86 'task': 'appenlight.celery.tasks.alerting_reports',
97 "schedule": timedelta(seconds=60),
87 'schedule': timedelta(seconds=60)
98 },
88 },
99 "close_alerts": {
89 'close_alerts': {
100 "task": "appenlight.celery.tasks.close_alerts",
90 'task': 'appenlight.celery.tasks.close_alerts',
101 "schedule": timedelta(seconds=60),
91 'schedule': timedelta(seconds=60)
102 },
92 }
103 },
93 }
104 }
94 }
105 celery.config_from_object(celery_config)
95 celery.config_from_object(celery_config)
106
96
107
97
108 def configure_celery(pyramid_registry):
98 def configure_celery(pyramid_registry):
109 settings = pyramid_registry.settings
99 settings = pyramid_registry.settings
110 celery_config["BROKER_URL"] = settings["celery.broker_url"]
100 celery_config['BROKER_URL'] = settings['celery.broker_url']
111 celery_config["CELERYD_CONCURRENCY"] = settings["celery.concurrency"]
101 celery_config['CELERYD_CONCURRENCY'] = settings['celery.concurrency']
112 celery_config["CELERY_TIMEZONE"] = settings["celery.timezone"]
102 celery_config['CELERY_TIMEZONE'] = settings['celery.timezone']
113
103
114 notifications_seconds = int(
104 notifications_seconds = int(settings.get('tasks.notifications_reports.interval', 60))
115 settings.get("tasks.notifications_reports.interval", 60)
116 )
117
105
118 celery_config["CELERYBEAT_SCHEDULE"]["notifications"] = {
106 celery_config['CELERYBEAT_SCHEDULE']['notifications'] = {
119 "task": "appenlight.celery.tasks.notifications_reports",
107 'task': 'appenlight.celery.tasks.notifications_reports',
120 "schedule": timedelta(seconds=notifications_seconds),
108 'schedule': timedelta(seconds=notifications_seconds)
121 }
109 }
122
110
123 celery_config["CELERYBEAT_SCHEDULE"]["daily_digest"] = {
111 celery_config['CELERYBEAT_SCHEDULE']['daily_digest'] = {
124 "task": "appenlight.celery.tasks.daily_digest",
112 'task': 'appenlight.celery.tasks.daily_digest',
125 "schedule": crontab(minute=1, hour="4,12,20"),
113 'schedule': crontab(minute=1, hour='4,12,20')
126 }
114 }
127
115
128 if asbool(settings.get("celery.always_eager")):
116 if asbool(settings.get('celery.always_eager')):
129 celery_config["CELERY_ALWAYS_EAGER"] = True
117 celery_config['CELERY_ALWAYS_EAGER'] = True
130 celery_config["CELERY_EAGER_PROPAGATES_EXCEPTIONS"] = True
118 celery_config['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = True
131
119
132 for plugin in pyramid_registry.appenlight_plugins.values():
120 for plugin in pyramid_registry.appenlight_plugins.values():
133 if plugin.get("celery_tasks"):
121 if plugin.get('celery_tasks'):
134 celery_config["CELERY_IMPORTS"].extend(plugin["celery_tasks"])
122 celery_config['CELERY_IMPORTS'].extend(plugin['celery_tasks'])
135 if plugin.get("celery_beats"):
123 if plugin.get('celery_beats'):
136 for name, config in plugin["celery_beats"]:
124 for name, config in plugin['celery_beats']:
137 celery_config["CELERYBEAT_SCHEDULE"][name] = config
125 celery_config['CELERYBEAT_SCHEDULE'][name] = config
138 celery.config_from_object(celery_config)
126 celery.config_from_object(celery_config)
139
127
140
128
141 @task_prerun.connect
129 @task_prerun.connect
142 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
130 def task_prerun_signal(task_id, task, args, kwargs, **kwaargs):
143 if hasattr(celery, "pyramid"):
131 if hasattr(celery, 'pyramid'):
144 env = celery.pyramid
132 env = celery.pyramid
145 env = prepare(registry=env["request"].registry)
133 env = prepare(registry=env['request'].registry)
146 proper_base_url = env["request"].registry.settings["mailing.app_url"]
134 proper_base_url = env['request'].registry.settings['mailing.app_url']
147 tmp_req = Request.blank("/", base_url=proper_base_url)
135 tmp_req = Request.blank('/', base_url=proper_base_url)
148 # ensure tasks generate URLs for the right domain from config
136 # ensure tasks generate URLs for the right domain from config
149 env["request"].environ["HTTP_HOST"] = tmp_req.environ["HTTP_HOST"]
137 env['request'].environ['HTTP_HOST'] = tmp_req.environ['HTTP_HOST']
150 env["request"].environ["SERVER_PORT"] = tmp_req.environ["SERVER_PORT"]
138 env['request'].environ['SERVER_PORT'] = tmp_req.environ['SERVER_PORT']
151 env["request"].environ["SERVER_NAME"] = tmp_req.environ["SERVER_NAME"]
139 env['request'].environ['SERVER_NAME'] = tmp_req.environ['SERVER_NAME']
152 env["request"].environ["wsgi.url_scheme"] = tmp_req.environ["wsgi.url_scheme"]
140 env['request'].environ['wsgi.url_scheme'] = \
141 tmp_req.environ['wsgi.url_scheme']
153 get_current_request().tm.begin()
142 get_current_request().tm.begin()
154
143
155
144
156 @task_success.connect
145 @task_success.connect
157 def task_success_signal(result, **kwargs):
146 def task_success_signal(result, **kwargs):
158 get_current_request().tm.commit()
147 get_current_request().tm.commit()
159 if hasattr(celery, "pyramid"):
148 if hasattr(celery, 'pyramid'):
160 celery.pyramid["closer"]()
149 celery.pyramid["closer"]()
161
150
162
151
163 @task_retry.connect
152 @task_retry.connect
164 def task_retry_signal(request, reason, einfo, **kwargs):
153 def task_retry_signal(request, reason, einfo, **kwargs):
165 get_current_request().tm.abort()
154 get_current_request().tm.abort()
166 if hasattr(celery, "pyramid"):
155 if hasattr(celery, 'pyramid'):
167 celery.pyramid["closer"]()
156 celery.pyramid["closer"]()
168
157
169
158
170 @task_failure.connect
159 @task_failure.connect
171 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo, **kwaargs):
160 def task_failure_signal(task_id, exception, args, kwargs, traceback, einfo,
161 **kwaargs):
172 get_current_request().tm.abort()
162 get_current_request().tm.abort()
173 if hasattr(celery, "pyramid"):
163 if hasattr(celery, 'pyramid'):
174 celery.pyramid["closer"]()
164 celery.pyramid["closer"]()
175
165
176
166
177 @task_revoked.connect
167 @task_revoked.connect
178 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
168 def task_revoked_signal(request, terminated, signum, expired, **kwaargs):
179 get_current_request().tm.abort()
169 get_current_request().tm.abort()
180 if hasattr(celery, "pyramid"):
170 if hasattr(celery, 'pyramid'):
181 celery.pyramid["closer"]()
171 celery.pyramid["closer"]()
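Taken together, these handlers wrap every task in a Pyramid transaction: begin on `task_prerun`, commit on `task_success`, abort on retry, failure, or revoke. A minimal standalone sketch of wiring such signals (handler bodies here are illustrative only):

    from celery.signals import task_prerun, task_success

    @task_prerun.connect
    def on_task_start(task_id, task, args, kwargs, **extra):
        print("starting %s (%s)" % (task.name, task_id))  # e.g. begin transaction

    @task_success.connect
    def on_task_done(result, **extra):
        print("finished with %r" % (result,))  # e.g. commit transaction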
@@ -17,29 +17,38 b''
17 import json
17 import json
18 from datetime import datetime, date, timedelta
18 from datetime import datetime, date, timedelta
19
19
20 DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
20 DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
21
21
22
22
23 class DateEncoder(json.JSONEncoder):
23 class DateEncoder(json.JSONEncoder):
24 def default(self, obj):
24 def default(self, obj):
25 if isinstance(obj, datetime):
25 if isinstance(obj, datetime):
26 return {"__type__": "__datetime__", "iso": obj.strftime(DATE_FORMAT)}
26 return {
27 '__type__': '__datetime__',
28 'iso': obj.strftime(DATE_FORMAT)
29 }
27 elif isinstance(obj, date):
30 elif isinstance(obj, date):
28 return {"__type__": "__date__", "iso": obj.strftime(DATE_FORMAT)}
31 return {
32 '__type__': '__date__',
33 'iso': obj.strftime(DATE_FORMAT)
34 }
29 elif isinstance(obj, timedelta):
35 elif isinstance(obj, timedelta):
30 return {"__type__": "__timedelta__", "seconds": obj.total_seconds()}
36 return {
37 '__type__': '__timedelta__',
38 'seconds': obj.total_seconds()
39 }
31 else:
40 else:
32 return json.JSONEncoder.default(self, obj)
41 return json.JSONEncoder.default(self, obj)
33
42
34
43
35 def date_decoder(dct):
44 def date_decoder(dct):
36 if "__type__" in dct:
45 if '__type__' in dct:
37 if dct["__type__"] == "__datetime__":
46 if dct['__type__'] == '__datetime__':
38 return datetime.strptime(dct["iso"], DATE_FORMAT)
47 return datetime.strptime(dct['iso'], DATE_FORMAT)
39 elif dct["__type__"] == "__date__":
48 elif dct['__type__'] == '__date__':
40 return datetime.strptime(dct["iso"], DATE_FORMAT).date()
49 return datetime.strptime(dct['iso'], DATE_FORMAT).date()
41 elif dct["__type__"] == "__timedelta__":
50 elif dct['__type__'] == '__timedelta__':
42 return timedelta(seconds=dct["seconds"])
51 return timedelta(seconds=dct['seconds'])
43 return dct
52 return dct
44
53
45
54
@@ -48,4 +57,4 b' def json_dumps(obj):'
48
57
49
58
50 def json_loads(obj):
59 def json_loads(obj):
51 return json.loads(obj.decode("utf8"), object_hook=date_decoder)
60 return json.loads(obj.decode('utf8'), object_hook=date_decoder)
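Assuming `json_dumps` is the symmetric counterpart (`json.dumps(obj, cls=DateEncoder)` - its body falls outside this hunk), the pair round-trips datetime-bearing payloads through the `__type__` markers:

    from datetime import datetime, timedelta

    payload = json_dumps({"when": datetime(2018, 10, 13, 19, 27, 30),
                          "ttl": timedelta(minutes=5)})
    # payload now carries {"__type__": "__timedelta__", "seconds": 300.0} style markers
    restored = json_loads(payload.encode("utf8"))  # json_loads expects bytes
    assert restored["ttl"] == timedelta(minutes=5)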
@@ -20,14 +20,11 b' import math'
20 from datetime import datetime, timedelta
20 from datetime import datetime, timedelta
21
21
22 import sqlalchemy as sa
22 import sqlalchemy as sa
23 import elasticsearch.exceptions
23 import pyelasticsearch
24 import elasticsearch.helpers
25
24
26 from celery.utils.log import get_task_logger
25 from celery.utils.log import get_task_logger
27 from zope.sqlalchemy import mark_changed
26 from zope.sqlalchemy import mark_changed
28 from pyramid.threadlocal import get_current_request, get_current_registry
27 from pyramid.threadlocal import get_current_request, get_current_registry
29 from ziggurat_foundations.models.services.resource import ResourceService
30
31 from appenlight.celery import celery
28 from appenlight.celery import celery
32 from appenlight.models.report_group import ReportGroup
29 from appenlight.models.report_group import ReportGroup
33 from appenlight.models import DBSession, Datastores
30 from appenlight.models import DBSession, Datastores
@@ -51,11 +48,9 b' from appenlight.lib.enums import ReportType'
51
48
52 log = get_task_logger(__name__)
49 log = get_task_logger(__name__)
53
50
54 sample_boundries = (
51 sample_boundries = list(range(100, 1000, 100)) + \
55 list(range(100, 1000, 100))
52 list(range(1000, 10000, 1000)) + \
56 + list(range(1000, 10000, 1000))
53 list(range(10000, 100000, 5000))
57 + list(range(10000, 100000, 5000))
58 )
59
54
60
55
61 def pick_sample(total_occurences, report_type=None):
56 def pick_sample(total_occurences, report_type=None):
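`pick_sample`'s body sits outside this diff, but the boundary list implies bucketed down-sampling: steps of 100 up to 1,000 occurrences, 1,000 up to 10,000, then 5,000 up to 100,000. A hypothetical illustration of reading those buckets:

    import bisect

    boundaries = (list(range(100, 1000, 100))
                  + list(range(1000, 10000, 1000))
                  + list(range(10000, 100000, 5000)))

    def sampling_step(total_occurences):
        # hypothetical: everything below the first boundary is kept,
        # above it the step of the surrounding bucket applies
        idx = bisect.bisect_right(boundaries, total_occurences)
        return 1 if idx == 0 else boundaries[idx - 1]

    print(sampling_step(50))    # 1    - keep every report
    print(sampling_step(2500))  # 2000 - sample far more aggressively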
@@ -72,9 +67,9 b' def pick_sample(total_occurences, report_type=None):'
72
67
73 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
68 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
74 def test_exception_task():
69 def test_exception_task():
75 log.error("test celery log", extra={"location": "celery"})
70 log.error('test celery log', extra={'location': 'celery'})
76 log.warning("test celery log", extra={"location": "celery"})
71 log.warning('test celery log', extra={'location': 'celery'})
77 raise Exception("Celery exception test")
72 raise Exception('Celery exception test')
78
73
79
74
80 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
75 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
@@ -83,18 +78,16 b' def test_retry_exception_task():'
83 import time
78 import time
84
79
85 time.sleep(1.3)
80 time.sleep(1.3)
86 log.error("test retry celery log", extra={"location": "celery"})
81 log.error('test retry celery log', extra={'location': 'celery'})
87 log.warning("test retry celery log", extra={"location": "celery"})
82 log.warning('test retry celery log', extra={'location': 'celery'})
88 raise Exception("Celery exception test")
83 raise Exception('Celery exception test')
89 except Exception as exc:
84 except Exception as exc:
90 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
91 raise
92 test_retry_exception_task.retry(exc=exc)
85 test_retry_exception_task.retry(exc=exc)
93
86
94
87
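One behavioral change hides in the reformatted task above: the new code checks `CELERY_EAGER_PROPAGATES_EXCEPTIONS` and re-raises instead of retrying, so eager (in-process test) runs fail fast rather than looping through retries. A minimal sketch of that guard pattern:

    from celery import Celery

    app = Celery()

    @app.task(bind=True, default_retry_delay=1, max_retries=2)
    def flaky(self):
        try:
            raise RuntimeError("transient failure")
        except Exception as exc:
            # in CELERY_ALWAYS_EAGER test runs, propagate instead of retrying
            if app.conf.get("CELERY_EAGER_PROPAGATES_EXCEPTIONS"):
                raise
            self.retry(exc=exc)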
95 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
88 @celery.task(queue="reports", default_retry_delay=600, max_retries=144)
96 def add_reports(resource_id, request_params, dataset, **kwargs):
89 def add_reports(resource_id, request_params, dataset, **kwargs):
97 proto_version = parse_proto(request_params.get("protocol_version", ""))
90 proto_version = parse_proto(request_params.get('protocol_version', ''))
98 current_time = datetime.utcnow().replace(second=0, microsecond=0)
91 current_time = datetime.utcnow().replace(second=0, microsecond=0)
99 try:
92 try:
100 # we will store solr docs here for single insert
93 # we will store solr docs here for single insert
@@ -116,26 +109,22 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
116 report_group = ReportGroupService.by_hash_and_resource(
109 report_group = ReportGroupService.by_hash_and_resource(
117 report.resource_id,
110 report.resource_id,
118 report.grouping_hash,
111 report.grouping_hash,
119 since_when=datetime.utcnow().date().replace(day=1),
112 since_when=datetime.utcnow().date().replace(day=1)
120 )
113 )
121 occurences = report_data.get("occurences", 1)
114 occurences = report_data.get('occurences', 1)
122 if not report_group:
115 if not report_group:
123 # total reports will be +1 moment later
116 # total reports will be +1 moment later
124 report_group = ReportGroup(
117 report_group = ReportGroup(grouping_hash=report.grouping_hash,
125 grouping_hash=report.grouping_hash,
118 occurences=0, total_reports=0,
126 occurences=0,
119 last_report=0,
127 total_reports=0,
120 priority=report.priority,
128 last_report=0,
121 error=report.error,
129 priority=report.priority,
122 first_timestamp=report.start_time)
130 error=report.error,
131 first_timestamp=report.start_time,
132 )
133 report_group._skip_ft_index = True
123 report_group._skip_ft_index = True
134 report_group.report_type = report.report_type
124 report_group.report_type = report.report_type
135 report.report_group_time = report_group.first_timestamp
125 report.report_group_time = report_group.first_timestamp
136 add_sample = pick_sample(
126 add_sample = pick_sample(report_group.occurences,
137 report_group.occurences, report_type=report_group.report_type
127 report_type=report_group.report_type)
138 )
139 if add_sample:
128 if add_sample:
140 resource.report_groups.append(report_group)
129 resource.report_groups.append(report_group)
141 report_group.reports.append(report)
130 report_group.reports.append(report)
@@ -150,26 +139,28 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
150 for s_call in slow_calls:
139 for s_call in slow_calls:
151 if s_call.partition_id not in es_slow_calls_docs:
140 if s_call.partition_id not in es_slow_calls_docs:
152 es_slow_calls_docs[s_call.partition_id] = []
141 es_slow_calls_docs[s_call.partition_id] = []
153 es_slow_calls_docs[s_call.partition_id].append(s_call.es_doc())
142 es_slow_calls_docs[s_call.partition_id].append(
143 s_call.es_doc())
154 # try generating new stat rows if needed
144 # try generating new stat rows if needed
155 else:
145 else:
156 # required for postprocessing to not fail later
146 # required for postprocessing to not fail later
157 report.report_group = report_group
147 report.report_group = report_group
158
148
159 stat_row = ReportService.generate_stat_rows(report, resource, report_group)
149 stat_row = ReportService.generate_stat_rows(
150 report, resource, report_group)
160 if stat_row.partition_id not in es_reports_stats_rows:
151 if stat_row.partition_id not in es_reports_stats_rows:
161 es_reports_stats_rows[stat_row.partition_id] = []
152 es_reports_stats_rows[stat_row.partition_id] = []
162 es_reports_stats_rows[stat_row.partition_id].append(stat_row.es_doc())
153 es_reports_stats_rows[stat_row.partition_id].append(
154 stat_row.es_doc())
163
155
164 # see if we should mark 10th occurrence of report
156 # see if we should mark 10th occurrence of report
165 last_occurences_10 = int(math.floor(report_group.occurences / 10))
157 last_occurences_10 = int(math.floor(report_group.occurences / 10))
166 curr_occurences_10 = int(
158 curr_occurences_10 = int(math.floor(
167 math.floor((report_group.occurences + report.occurences) / 10)
159 (report_group.occurences + report.occurences) / 10))
168 )
160 last_occurences_100 = int(
169 last_occurences_100 = int(math.floor(report_group.occurences / 100))
161 math.floor(report_group.occurences / 100))
170 curr_occurences_100 = int(
162 curr_occurences_100 = int(math.floor(
171 math.floor((report_group.occurences + report.occurences) / 100)
163 (report_group.occurences + report.occurences) / 100))
172 )
173 notify_occurences_10 = last_occurences_10 != curr_occurences_10
164 notify_occurences_10 = last_occurences_10 != curr_occurences_10
174 notify_occurences_100 = last_occurences_100 != curr_occurences_100
165 notify_occurences_100 = last_occurences_100 != curr_occurences_100
175 report_group.occurences = ReportGroup.occurences + occurences
166 report_group.occurences = ReportGroup.occurences + occurences
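The floor-division pairs above detect when a group's occurrence count crosses a multiple of 10 or 100, so notifications fire once per threshold rather than once per report. Worked example:

    import math

    existing, incoming = 9, 3  # the group had 9 occurrences; 3 more arrive
    last_10 = int(math.floor(existing / 10))               # 0
    curr_10 = int(math.floor((existing + incoming) / 10))  # 1
    notify_occurences_10 = last_10 != curr_10              # True: crossed 10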
@@ -182,47 +173,39 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
182 if added_details:
173 if added_details:
183 report_group.total_reports = ReportGroup.total_reports + 1
174 report_group.total_reports = ReportGroup.total_reports + 1
184 report_group.last_report = report.id
175 report_group.last_report = report.id
185 report_group.set_notification_info(
176 report_group.set_notification_info(notify_10=notify_occurences_10,
186 notify_10=notify_occurences_10, notify_100=notify_occurences_100
177 notify_100=notify_occurences_100)
187 )
188 DBSession.flush()
178 DBSession.flush()
189 report_group.get_report().notify_channel(report_group)
179 report_group.get_report().notify_channel(report_group)
190 if report_group.partition_id not in es_report_group_docs:
180 if report_group.partition_id not in es_report_group_docs:
191 es_report_group_docs[report_group.partition_id] = []
181 es_report_group_docs[report_group.partition_id] = []
192 es_report_group_docs[report_group.partition_id].append(
182 es_report_group_docs[report_group.partition_id].append(
193 report_group.es_doc()
183 report_group.es_doc())
194 )
195
184
196 action = "REPORT"
185 action = 'REPORT'
197 log_msg = "%s: %s %s, client: %s, proto: %s" % (
186 log_msg = '%s: %s %s, client: %s, proto: %s' % (
198 action,
187 action,
199 report_data.get("http_status", "unknown"),
188 report_data.get('http_status', 'unknown'),
200 str(resource),
189 str(resource),
201 report_data.get("client"),
190 report_data.get('client'),
202 proto_version,
191 proto_version)
203 )
204 log.info(log_msg)
192 log.info(log_msg)
205 total_reports = len(dataset)
193 total_reports = len(dataset)
206 redis_pipeline = Datastores.redis.pipeline(transaction=False)
194 redis_pipeline = Datastores.redis.pipeline(transaction=False)
207 key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
195 key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
208 redis_pipeline.incr(key, total_reports)
196 redis_pipeline.incr(key, total_reports)
209 redis_pipeline.expire(key, 3600 * 24)
197 redis_pipeline.expire(key, 3600 * 24)
210 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
198 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
211 resource.owner_user_id, current_time
199 resource.owner_user_id, current_time)
212 )
213 redis_pipeline.incr(key, total_reports)
200 redis_pipeline.incr(key, total_reports)
214 redis_pipeline.expire(key, 3600)
201 redis_pipeline.expire(key, 3600)
215 key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
202 key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format(
216 resource_id, current_time.replace(minute=0)
203 resource_id, current_time.replace(minute=0))
217 )
218 redis_pipeline.incr(key, total_reports)
204 redis_pipeline.incr(key, total_reports)
219 redis_pipeline.expire(key, 3600 * 24 * 7)
205 redis_pipeline.expire(key, 3600 * 24 * 7)
220 redis_pipeline.sadd(
206 redis_pipeline.sadd(
221 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
207 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
222 current_time.replace(minute=0)
208 current_time.replace(minute=0)), resource_id)
223 ),
224 resource_id,
225 )
226 redis_pipeline.execute()
209 redis_pipeline.execute()
227
210
228 add_reports_es(es_report_group_docs, es_report_docs)
211 add_reports_es(es_report_group_docs, es_report_docs)
@@ -231,42 +214,33 b' def add_reports(resource_id, request_params, dataset, **kwargs):'
231 return True
214 return True
232 except Exception as exc:
215 except Exception as exc:
233 print_traceback(log)
216 print_traceback(log)
234 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
235 raise
236 add_reports.retry(exc=exc)
217 add_reports.retry(exc=exc)
237
218
238
219
239 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
220 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
240 def add_reports_es(report_group_docs, report_docs):
221 def add_reports_es(report_group_docs, report_docs):
241 for k, v in report_group_docs.items():
222 for k, v in report_group_docs.items():
242 to_update = {"_index": k, "_type": "report"}
223 Datastores.es.bulk_index(k, 'report_group', v, id_field="_id")
243 [i.update(to_update) for i in v]
244 elasticsearch.helpers.bulk(Datastores.es, v)
245 for k, v in report_docs.items():
224 for k, v in report_docs.items():
246 to_update = {"_index": k, "_type": "report"}
225 Datastores.es.bulk_index(k, 'report', v, id_field="_id",
247 [i.update(to_update) for i in v]
226 parent_field='_parent')
248 elasticsearch.helpers.bulk(Datastores.es, v)
249
227
250
228
251 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
229 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
252 def add_reports_slow_calls_es(es_docs):
230 def add_reports_slow_calls_es(es_docs):
253 for k, v in es_docs.items():
231 for k, v in es_docs.items():
254 to_update = {"_index": k, "_type": "log"}
232 Datastores.es.bulk_index(k, 'log', v)
255 [i.update(to_update) for i in v]
256 elasticsearch.helpers.bulk(Datastores.es, v)
257
233
258
234
259 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
235 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
260 def add_reports_stats_rows_es(es_docs):
236 def add_reports_stats_rows_es(es_docs):
261 for k, v in es_docs.items():
237 for k, v in es_docs.items():
262 to_update = {"_index": k, "_type": "report"}
238 Datastores.es.bulk_index(k, 'log', v)
263 [i.update(to_update) for i in v]
264 elasticsearch.helpers.bulk(Datastores.es, v)
265
239
266
240
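The pyelasticsearch `bulk_index` calls give way to `elasticsearch.helpers.bulk`, which consumes a flat list of action dicts carrying their own `_index`/`_type` metadata - hence the `[i.update(to_update) for i in v]` stamping above. A sketch of the call shape (host and index names are placeholders):

    from elasticsearch import Elasticsearch
    import elasticsearch.helpers

    es = Elasticsearch(["127.0.0.1:9200"])  # placeholder node
    docs = [{"_id": "abc123", "error": "IndexError", "occurences": 12}]
    # stamp each doc with its destination, as the tasks above do
    actions = [dict(doc, _index="rcae_r_2018_10", _type="report") for doc in docs]
    elasticsearch.helpers.bulk(es, actions)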
267 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
241 @celery.task(queue="logs", default_retry_delay=600, max_retries=144)
268 def add_logs(resource_id, request_params, dataset, **kwargs):
242 def add_logs(resource_id, request_params, dataset, **kwargs):
269 proto_version = request_params.get("protocol_version")
243 proto_version = request_params.get('protocol_version')
270 current_time = datetime.utcnow().replace(second=0, microsecond=0)
244 current_time = datetime.utcnow().replace(second=0, microsecond=0)
271
245
272 try:
246 try:
@@ -276,25 +250,25 b' def add_logs(resource_id, request_params, dataset, **kwargs):'
276 ns_pairs = []
250 ns_pairs = []
277 for entry in dataset:
251 for entry in dataset:
278 # gather pk and ns so we can remove older versions of row later
252 # gather pk and ns so we can remove older versions of row later
279 if entry["primary_key"] is not None:
253 if entry['primary_key'] is not None:
280 ns_pairs.append({"pk": entry["primary_key"], "ns": entry["namespace"]})
254 ns_pairs.append({"pk": entry['primary_key'],
255 "ns": entry['namespace']})
281 log_entry = Log()
256 log_entry = Log()
282 log_entry.set_data(entry, resource=resource)
257 log_entry.set_data(entry, resource=resource)
283 log_entry._skip_ft_index = True
258 log_entry._skip_ft_index = True
284 resource.logs.append(log_entry)
259 resource.logs.append(log_entry)
285 DBSession.flush()
260 DBSession.flush()
286 # insert non pk rows first
261 # insert non pk rows first
287 if entry["primary_key"] is None:
262 if entry['primary_key'] is None:
288 es_docs[log_entry.partition_id].append(log_entry.es_doc())
263 es_docs[log_entry.partition_id].append(log_entry.es_doc())
289
264
290 # 2nd pass to delete all log entries from db for same pk/ns pair
265 # 2nd pass to delete all log entries from db for same pk/ns pair
291 if ns_pairs:
266 if ns_pairs:
292 ids_to_delete = []
267 ids_to_delete = []
293 es_docs = collections.defaultdict(list)
268 es_docs = collections.defaultdict(list)
294 es_docs_to_delete = collections.defaultdict(list)
269 es_docs_to_delete = collections.defaultdict(list)
295 found_pkey_logs = LogService.query_by_primary_key_and_namespace(
270 found_pkey_logs = LogService.query_by_primary_key_and_namespace(
296 list_of_pairs=ns_pairs
271 list_of_pairs=ns_pairs)
297 )
298 log_dict = {}
272 log_dict = {}
299 for log_entry in found_pkey_logs:
273 for log_entry in found_pkey_logs:
300 log_key = (log_entry.primary_key, log_entry.namespace)
274 log_key = (log_entry.primary_key, log_entry.namespace)
@@ -311,75 +285,63 b' def add_logs(resource_id, request_params, dataset, **kwargs):'
311 ids_to_delete.append(e.log_id)
285 ids_to_delete.append(e.log_id)
312 es_docs_to_delete[e.partition_id].append(e.delete_hash)
286 es_docs_to_delete[e.partition_id].append(e.delete_hash)
313
287
314 es_docs_to_delete[log_entry.partition_id].append(log_entry.delete_hash)
288 es_docs_to_delete[log_entry.partition_id].append(
289 log_entry.delete_hash)
315
290
316 es_docs[log_entry.partition_id].append(log_entry.es_doc())
291 es_docs[log_entry.partition_id].append(log_entry.es_doc())
317
292
318 if ids_to_delete:
293 if ids_to_delete:
319 query = DBSession.query(Log).filter(Log.log_id.in_(ids_to_delete))
294 query = DBSession.query(Log).filter(
295 Log.log_id.in_(ids_to_delete))
320 query.delete(synchronize_session=False)
296 query.delete(synchronize_session=False)
321 if es_docs_to_delete:
297 if es_docs_to_delete:
322 # batch this to avoid problems with default ES bulk limits
298 # batch this to avoid problems with default ES bulk limits
323 for es_index in es_docs_to_delete.keys():
299 for es_index in es_docs_to_delete.keys():
324 for batch in in_batches(es_docs_to_delete[es_index], 20):
300 for batch in in_batches(es_docs_to_delete[es_index], 20):
325 query = {"query": {"terms": {"delete_hash": batch}}}
301 query = {'terms': {'delete_hash': batch}}
326
302
327 try:
303 try:
328 Datastores.es.delete_by_query(
304 Datastores.es.delete_by_query(
329 index=es_index,
305 es_index, 'log', query)
330 doc_type="log",
306 except pyelasticsearch.ElasticHttpNotFoundError as exc:
331 body=query,
307 msg = 'skipping index {}'.format(es_index)
332 conflicts="proceed",
333 )
334 except elasticsearch.exceptions.NotFoundError as exc:
335 msg = "skipping index {}".format(es_index)
336 log.info(msg)
308 log.info(msg)
337
309
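`delete_by_query` changes shape in the same migration: the terms query moves under a top-level "query" key inside `body`, `conflicts="proceed"` tolerates version conflicts, and a missing index now raises `elasticsearch.exceptions.NotFoundError` rather than pyelasticsearch's `ElasticHttpNotFoundError`. A sketch (node and index names are placeholders):

    from elasticsearch import Elasticsearch
    import elasticsearch.exceptions

    es = Elasticsearch(["127.0.0.1:9200"])  # placeholder node
    body = {"query": {"terms": {"delete_hash": ["h1", "h2"]}}}
    try:
        es.delete_by_query(index="rcae_l_2018_10", doc_type="log",
                           body=body, conflicts="proceed")
    except elasticsearch.exceptions.NotFoundError:
        pass  # partition index already rotated away; safe to skip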
338 total_logs = len(dataset)
310 total_logs = len(dataset)
339
311
340 log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
312 log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
341 str(resource),
313 str(resource),
342 total_logs,
314 total_logs,
343 proto_version,
315 proto_version)
344 )
345 log.info(log_msg)
316 log.info(log_msg)
346 # mark_changed(session)
317 # mark_changed(session)
347 redis_pipeline = Datastores.redis.pipeline(transaction=False)
318 redis_pipeline = Datastores.redis.pipeline(transaction=False)
348 key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
319 key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
349 redis_pipeline.incr(key, total_logs)
320 redis_pipeline.incr(key, total_logs)
350 redis_pipeline.expire(key, 3600 * 24)
321 redis_pipeline.expire(key, 3600 * 24)
351 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
322 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
352 resource.owner_user_id, current_time
323 resource.owner_user_id, current_time)
353 )
354 redis_pipeline.incr(key, total_logs)
324 redis_pipeline.incr(key, total_logs)
355 redis_pipeline.expire(key, 3600)
325 redis_pipeline.expire(key, 3600)
356 key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
326 key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format(
357 resource_id, current_time.replace(minute=0)
327 resource_id, current_time.replace(minute=0))
358 )
359 redis_pipeline.incr(key, total_logs)
328 redis_pipeline.incr(key, total_logs)
360 redis_pipeline.expire(key, 3600 * 24 * 7)
329 redis_pipeline.expire(key, 3600 * 24 * 7)
361 redis_pipeline.sadd(
330 redis_pipeline.sadd(
362 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
331 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
363 current_time.replace(minute=0)
332 current_time.replace(minute=0)), resource_id)
364 ),
365 resource_id,
366 )
367 redis_pipeline.execute()
333 redis_pipeline.execute()
368 add_logs_es(es_docs)
334 add_logs_es(es_docs)
369 return True
335 return True
370 except Exception as exc:
336 except Exception as exc:
371 print_traceback(log)
337 print_traceback(log)
372 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
373 raise
374 add_logs.retry(exc=exc)
338 add_logs.retry(exc=exc)
375
339
376
340
377 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
341 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
378 def add_logs_es(es_docs):
342 def add_logs_es(es_docs):
379 for k, v in es_docs.items():
343 for k, v in es_docs.items():
380 to_update = {"_index": k, "_type": "log"}
344 Datastores.es.bulk_index(k, 'log', v)
381 [i.update(to_update) for i in v]
382 elasticsearch.helpers.bulk(Datastores.es, v)
383
345
384
346
385 @celery.task(queue="metrics", default_retry_delay=600, max_retries=144)
347 @celery.task(queue="metrics", default_retry_delay=600, max_retries=144)
@@ -391,66 +353,58 b' def add_metrics(resource_id, request_params, dataset, proto_version):'
391 es_docs = []
353 es_docs = []
392 rows = []
354 rows = []
393 for metric in dataset:
355 for metric in dataset:
394 tags = dict(metric["tags"])
356 tags = dict(metric['tags'])
395 server_n = tags.get("server_name", metric["server_name"]).lower()
357 server_n = tags.get('server_name', metric['server_name']).lower()
396 tags["server_name"] = server_n or "unknown"
358 tags['server_name'] = server_n or 'unknown'
397 new_metric = Metric(
359 new_metric = Metric(
398 timestamp=metric["timestamp"],
360 timestamp=metric['timestamp'],
399 resource_id=resource.resource_id,
361 resource_id=resource.resource_id,
400 namespace=metric["namespace"],
362 namespace=metric['namespace'],
401 tags=tags,
363 tags=tags)
402 )
403 rows.append(new_metric)
364 rows.append(new_metric)
404 es_docs.append(new_metric.es_doc())
365 es_docs.append(new_metric.es_doc())
405 session = DBSession()
366 session = DBSession()
406 session.bulk_save_objects(rows)
367 session.bulk_save_objects(rows)
407 session.flush()
368 session.flush()
408
369
409 action = "METRICS"
370 action = 'METRICS'
410 metrics_msg = "%s: %s, metrics: %s, proto:%s" % (
371 metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
411 action,
372 action,
412 str(resource),
373 str(resource),
413 len(dataset),
374 len(dataset),
414 proto_version,
375 proto_version
415 )
376 )
416 log.info(metrics_msg)
377 log.info(metrics_msg)
417
378
418 mark_changed(session)
379 mark_changed(session)
419 redis_pipeline = Datastores.redis.pipeline(transaction=False)
380 redis_pipeline = Datastores.redis.pipeline(transaction=False)
420 key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
381 key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
421 redis_pipeline.incr(key, len(rows))
382 redis_pipeline.incr(key, len(rows))
422 redis_pipeline.expire(key, 3600 * 24)
383 redis_pipeline.expire(key, 3600 * 24)
423 key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
384 key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
424 resource.owner_user_id, current_time
385 resource.owner_user_id, current_time)
425 )
426 redis_pipeline.incr(key, len(rows))
386 redis_pipeline.incr(key, len(rows))
427 redis_pipeline.expire(key, 3600)
387 redis_pipeline.expire(key, 3600)
428 key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format(
388 key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format(
429 resource_id, current_time.replace(minute=0)
389 resource_id, current_time.replace(minute=0))
430 )
431 redis_pipeline.incr(key, len(rows))
390 redis_pipeline.incr(key, len(rows))
432 redis_pipeline.expire(key, 3600 * 24 * 7)
391 redis_pipeline.expire(key, 3600 * 24 * 7)
433 redis_pipeline.sadd(
392 redis_pipeline.sadd(
434 REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
393 REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
435 current_time.replace(minute=0)
394 current_time.replace(minute=0)), resource_id)
436 ),
437 resource_id,
438 )
439 redis_pipeline.execute()
395 redis_pipeline.execute()
440 add_metrics_es(es_docs)
396 add_metrics_es(es_docs)
441 return True
397 return True
442 except Exception as exc:
398 except Exception as exc:
443 print_traceback(log)
399 print_traceback(log)
444 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
445 raise
446 add_metrics.retry(exc=exc)
400 add_metrics.retry(exc=exc)
447
401
448
402
449 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
403 @celery.task(queue="es", default_retry_delay=600, max_retries=144)
450 def add_metrics_es(es_docs):
404 def add_metrics_es(es_docs):
451 for doc in es_docs:
405 for doc in es_docs:
452 partition = "rcae_m_%s" % doc["timestamp"].strftime("%Y_%m_%d")
406 partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d')
453 Datastores.es.index(partition, "log", doc)
407 Datastores.es.index(partition, 'log', doc)
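Each ingest task updates the same family of Redis counters through a non-transactional pipeline, batching every incr/expire pair into a single round-trip. Minimal sketch (URL and key shape are placeholders):

    import redis

    r = redis.StrictRedis.from_url("redis://127.0.0.1:6379/0")  # placeholder URL
    pipe = r.pipeline(transaction=False)
    key = "metrics_per_minute:2018-10-13T19:27"  # illustrative key shape
    pipe.incr(key, 25)
    pipe.expire(key, 3600 * 24)  # keep per-minute counters for a day
    pipe.execute()               # both commands sent together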
454
408
455
409
456 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
410 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
@@ -461,12 +415,10 b' def check_user_report_notifications(resource_id):'
461 application = ApplicationService.by_id(resource_id)
415 application = ApplicationService.by_id(resource_id)
462 if not application:
416 if not application:
463 return
417 return
464 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
418 error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
465 ReportType.error, resource_id
419 ReportType.error, resource_id)
466 )
420 slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
467 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
421 ReportType.slow, resource_id)
468 ReportType.slow, resource_id
469 )
470 error_group_ids = Datastores.redis.smembers(error_key)
422 error_group_ids = Datastores.redis.smembers(error_key)
471 slow_group_ids = Datastores.redis.smembers(slow_key)
423 slow_group_ids = Datastores.redis.smembers(slow_key)
472 Datastores.redis.delete(error_key)
424 Datastores.redis.delete(error_key)
@@ -476,7 +428,8 b' def check_user_report_notifications(resource_id):'
476 group_ids = err_gids + slow_gids
428 group_ids = err_gids + slow_gids
477 occurence_dict = {}
429 occurence_dict = {}
478 for g_id in group_ids:
430 for g_id in group_ids:
479 key = REDIS_KEYS["counters"]["report_group_occurences"].format(g_id)
431 key = REDIS_KEYS['counters']['report_group_occurences'].format(
432 g_id)
480 val = Datastores.redis.get(key)
433 val = Datastores.redis.get(key)
481 Datastores.redis.delete(key)
434 Datastores.redis.delete(key)
482 if val:
435 if val:
@@ -487,23 +440,14 b' def check_user_report_notifications(resource_id):'
487 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
440 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
488
441
489 ApplicationService.check_for_groups_alert(
442 ApplicationService.check_for_groups_alert(
490 application,
443 application, 'alert', report_groups=report_groups,
491 "alert",
444 occurence_dict=occurence_dict)
492 report_groups=report_groups,
445 users = set([p.user for p in application.users_for_perm('view')])
493 occurence_dict=occurence_dict,
494 )
495 users = set(
496 [p.user for p in ResourceService.users_for_perm(application, "view")]
497 )
498 report_groups = report_groups.all()
446 report_groups = report_groups.all()
499 for user in users:
447 for user in users:
500 UserService.report_notify(
448 UserService.report_notify(user, request, application,
501 user,
449 report_groups=report_groups,
502 request,
450 occurence_dict=occurence_dict)
503 application,
504 report_groups=report_groups,
505 occurence_dict=occurence_dict,
506 )
507 for group in report_groups:
451 for group in report_groups:
508 # marks report_groups as notified
452 # marks report_groups as notified
509 if not group.notified:
453 if not group.notified:
@@ -521,12 +465,12 b' def check_alerts(resource_id):'
521 application = ApplicationService.by_id(resource_id)
465 application = ApplicationService.by_id(resource_id)
522 if not application:
466 if not application:
523 return
467 return
524 error_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
468 error_key = REDIS_KEYS[
525 ReportType.error, resource_id
469 'reports_to_notify_per_type_per_app_alerting'].format(
526 )
470 ReportType.error, resource_id)
527 slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app_alerting"].format(
471 slow_key = REDIS_KEYS[
528 ReportType.slow, resource_id
472 'reports_to_notify_per_type_per_app_alerting'].format(
529 )
473 ReportType.slow, resource_id)
530 error_group_ids = Datastores.redis.smembers(error_key)
474 error_group_ids = Datastores.redis.smembers(error_key)
531 slow_group_ids = Datastores.redis.smembers(slow_key)
475 slow_group_ids = Datastores.redis.smembers(slow_key)
532 Datastores.redis.delete(error_key)
476 Datastores.redis.delete(error_key)
@@ -536,9 +480,9 b' def check_alerts(resource_id):'
536 group_ids = err_gids + slow_gids
480 group_ids = err_gids + slow_gids
537 occurence_dict = {}
481 occurence_dict = {}
538 for g_id in group_ids:
482 for g_id in group_ids:
539 key = REDIS_KEYS["counters"]["report_group_occurences_alerting"].format(
483 key = REDIS_KEYS['counters'][
540 g_id
484 'report_group_occurences_alerting'].format(
541 )
485 g_id)
542 val = Datastores.redis.get(key)
486 val = Datastores.redis.get(key)
543 Datastores.redis.delete(key)
487 Datastores.redis.delete(key)
544 if val:
488 if val:
@@ -549,12 +493,8 b' def check_alerts(resource_id):'
549 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
493 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
550
494
551 ApplicationService.check_for_groups_alert(
495 ApplicationService.check_for_groups_alert(
552 application,
496 application, 'alert', report_groups=report_groups,
553 "alert",
497 occurence_dict=occurence_dict, since_when=since_when)
554 report_groups=report_groups,
555 occurence_dict=occurence_dict,
556 since_when=since_when,
557 )
558 except Exception as exc:
498 except Exception as exc:
559 print_traceback(log)
499 print_traceback(log)
560 raise
500 raise
@@ -562,21 +502,21 b' def check_alerts(resource_id):'
562
502
563 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
503 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
564 def close_alerts():
504 def close_alerts():
565 log.warning("Checking alerts")
505 log.warning('Checking alerts')
566 since_when = datetime.utcnow()
506 since_when = datetime.utcnow()
567 try:
507 try:
568 event_types = [
508 event_types = [Event.types['error_report_alert'],
569 Event.types["error_report_alert"],
509 Event.types['slow_report_alert'], ]
570 Event.types["slow_report_alert"],
510 statuses = [Event.statuses['active']]
571 ]
572 statuses = [Event.statuses["active"]]
573 # get events older than 5 min
511 # get events older than 5 min
574 events = EventService.by_type_and_status(
512 events = EventService.by_type_and_status(
575 event_types, statuses, older_than=(since_when - timedelta(minutes=5))
513 event_types,
576 )
514 statuses,
515 older_than=(since_when - timedelta(minutes=5)))
577 for event in events:
516 for event in events:
578 # see if we can close them
517 # see if we can close them
579 event.validate_or_close(since_when=(since_when - timedelta(minutes=1)))
518 event.validate_or_close(
519 since_when=(since_when - timedelta(minutes=1)))
580 except Exception as exc:
520 except Exception as exc:
581 print_traceback(log)
521 print_traceback(log)
582 raise
522 raise
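The two timedeltas above are easy to conflate; a sketch of just the date math, with the values from the diff:

from datetime import datetime, timedelta

since_when = datetime.utcnow()
# only alert events opened more than 5 minutes ago are close candidates
older_than = since_when - timedelta(minutes=5)
# validate_or_close re-checks traffic up to 1 minute back before closing
close_cutoff = since_when - timedelta(minutes=1)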
@@ -585,25 +525,17 b' def close_alerts():'
585 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
525 @celery.task(queue="default", default_retry_delay=600, max_retries=144)
586 def update_tag_counter(tag_name, tag_value, count):
526 def update_tag_counter(tag_name, tag_value, count):
587 try:
527 try:
588 query = (
528 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
589 DBSession.query(Tag)
529 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
590 .filter(Tag.name == tag_name)
530 sa.types.TEXT))
591 .filter(
531 query.update({'times_seen': Tag.times_seen + count,
592 sa.cast(Tag.value, sa.types.TEXT)
532 'last_timestamp': datetime.utcnow()},
593 == sa.cast(json.dumps(tag_value), sa.types.TEXT)
533 synchronize_session=False)
594 )
595 )
596 query.update(
597 {"times_seen": Tag.times_seen + count, "last_timestamp": datetime.utcnow()},
598 synchronize_session=False,
599 )
600 session = DBSession()
534 session = DBSession()
601 mark_changed(session)
535 mark_changed(session)
602 return True
536 return True
603 except Exception as exc:
537 except Exception as exc:
604 print_traceback(log)
538 print_traceback(log)
605 if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
606 raise
607 update_tag_counter.retry(exc=exc)
539 update_tag_counter.retry(exc=exc)
608
540
609
541
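The double sa.cast above is the subtle part: Tag.value looks to be a JSON-typed column (an assumption on my part), so equality is forced onto the serialized TEXT form of both sides. A sketch of just that filter, with Tag taken from the diff:

import json
import sqlalchemy as sa

def tag_value_matches(tag_value):
    # compare stored JSON and the incoming value on their TEXT serializations,
    # avoiding backend-specific JSON equality operators
    return sa.cast(Tag.value, sa.types.TEXT) == sa.cast(
        json.dumps(tag_value), sa.types.TEXT
    )

Because the bulk UPDATE runs with synchronize_session=False, the mark_changed(session) call is what tells the zope transaction machinery the session is dirty and must be committed.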
@@ -612,8 +544,8 b' def update_tag_counters():'
612 """
544 """
613 Sets task to update counters for application tags
545 Sets task to update counters for application tags
614 """
546 """
615 tags = Datastores.redis.lrange(REDIS_KEYS["seen_tag_list"], 0, -1)
547 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
616 Datastores.redis.delete(REDIS_KEYS["seen_tag_list"])
548 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
617 c = collections.Counter(tags)
549 c = collections.Counter(tags)
618 for t_json, count in c.items():
550 for t_json, count in c.items():
619 tag_info = json.loads(t_json)
551 tag_info = json.loads(t_json)
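A sketch of the dedup step above: the Redis list holds one JSON blob per tag sighting, and collections.Counter collapses repeats so each distinct tag triggers a single counter update (the .delay() call is implied by the task above, shown here commented):

import collections
import json

tags = [b'{"name": "type", "value": "error"}',
        b'{"name": "type", "value": "error"}',
        b'{"name": "site", "value": "example.com"}']
for t_json, count in collections.Counter(tags).items():
    tag_info = json.loads(t_json)
    # update_tag_counter.delay(tag_info["name"], tag_info["value"], count)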
@@ -626,34 +558,28 b' def daily_digest():'
626 Sends daily digest with top 50 error reports
558 Sends daily digest with top 50 error reports
627 """
559 """
628 request = get_current_request()
560 request = get_current_request()
629 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
561 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
630 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
562 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
631 since_when = datetime.utcnow() - timedelta(hours=8)
563 since_when = datetime.utcnow() - timedelta(hours=8)
632 log.warning("Generating daily digests")
564 log.warning('Generating daily digests')
633 for resource_id in apps:
565 for resource_id in apps:
634 resource_id = resource_id.decode("utf8")
566 resource_id = resource_id.decode('utf8')
635 end_date = datetime.utcnow().replace(microsecond=0, second=0)
567 end_date = datetime.utcnow().replace(microsecond=0, second=0)
636 filter_settings = {
568 filter_settings = {'resource': [resource_id],
637 "resource": [resource_id],
569 'tags': [{'name': 'type',
638 "tags": [{"name": "type", "value": ["error"], "op": None}],
570 'value': ['error'], 'op': None}],
639 "type": "error",
571 'type': 'error', 'start_date': since_when,
640 "start_date": since_when,
572 'end_date': end_date}
641 "end_date": end_date,
642 }
643
573
644 reports = ReportGroupService.get_trending(
574 reports = ReportGroupService.get_trending(
645 request, filter_settings=filter_settings, limit=50
575 request, filter_settings=filter_settings, limit=50)
646 )
647
576
648 application = ApplicationService.by_id(resource_id)
577 application = ApplicationService.by_id(resource_id)
649 if application:
578 if application:
650 users = set(
579 users = set([p.user for p in application.users_for_perm('view')])
651 [p.user for p in ResourceService.users_for_perm(application, "view")]
652 )
653 for user in users:
580 for user in users:
654 user.send_digest(
581 user.send_digest(request, application, reports=reports,
655 request, application, reports=reports, since_when=since_when
582 since_when=since_when)
656 )
657
583
658
584
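The same hunk moves the permission lookup onto the ziggurat_foundations service layer; the set keeps a user who matches several permissions from receiving duplicate digests. Condensed:

# distinct users holding 'view' on the app, via the service API from the diff
users = {perm.user for perm in ResourceService.users_for_perm(application, "view")}
for user in users:
    user.send_digest(request, application, reports=reports, since_when=since_when)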
659 @celery.task(queue="default")
585 @celery.task(queue="default")
@@ -662,12 +588,11 b' def notifications_reports():'
662 Loop that checks redis for info and then issues new tasks to celery to
588 Loop that checks redis for info and then issues new tasks to celery to
663 issue notifications
589 issue notifications
664 """
590 """
665 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
591 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
666 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
592 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
667 for app in apps:
593 for app in apps:
668 log.warning("Notify for app: %s" % app)
594 log.warning('Notify for app: %s' % app)
669 check_user_report_notifications.delay(app.decode("utf8"))
595 check_user_report_notifications.delay(app.decode('utf8'))
670
671
596
672 @celery.task(queue="default")
597 @celery.task(queue="default")
673 def alerting_reports():
598 def alerting_reports():
@@ -677,29 +602,59 b' def alerting_reports():'
677 - which applications should have new alerts opened
602 - which applications should have new alerts opened
678 """
603 """
679
604
680 apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports_alerting"])
605 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
681 Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports_alerting"])
606 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
682 for app in apps:
607 for app in apps:
683 log.warning("Notify for app: %s" % app)
608 log.warning('Notify for app: %s' % app)
684 check_alerts.delay(app.decode("utf8"))
609 check_alerts.delay(app.decode('utf8'))
685
610
686
611
687 @celery.task(
612 @celery.task(queue="default", soft_time_limit=3600 * 4,
688 queue="default", soft_time_limit=3600 * 4, hard_time_limit=3600 * 4, max_retries=144
613 hard_time_limit=3600 * 4, max_retries=144)
689 )
690 def logs_cleanup(resource_id, filter_settings):
614 def logs_cleanup(resource_id, filter_settings):
691 request = get_current_request()
615 request = get_current_request()
692 request.tm.begin()
616 request.tm.begin()
693 es_query = {"query": {"bool": {"filter": [{"term": {"resource_id": resource_id}}]}}}
617 es_query = {
618 "_source": False,
619 "size": 5000,
620 "query": {
621 "filtered": {
622 "filter": {
623 "and": [{"term": {"resource_id": resource_id}}]
624 }
625 }
626 }
627 }
694
628
695 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
629 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
696 if filter_settings["namespace"]:
630 if filter_settings['namespace']:
697 query = query.filter(Log.namespace == filter_settings["namespace"][0])
631 query = query.filter(Log.namespace == filter_settings['namespace'][0])
698 es_query["query"]["bool"]["filter"].append(
632 es_query['query']['filtered']['filter']['and'].append(
699 {"term": {"namespace": filter_settings["namespace"][0]}}
633 {"term": {"namespace": filter_settings['namespace'][0]}}
700 )
634 )
701 query.delete(synchronize_session=False)
635 query.delete(synchronize_session=False)
702 request.tm.commit()
636 request.tm.commit()
703 Datastores.es.delete_by_query(
637 result = request.es_conn.search(es_query, index='rcae_l_*',
704 index="rcae_l_*", doc_type="log", body=es_query, conflicts="proceed"
638 doc_type='log', es_scroll='1m',
705 )
639 es_search_type='scan')
640 scroll_id = result['_scroll_id']
641 while True:
642 log.warning('log_cleanup, app:{} ns:{} batch'.format(
643 resource_id,
644 filter_settings['namespace']
645 ))
646 es_docs_to_delete = []
647 result = request.es_conn.send_request(
648 'POST', ['_search', 'scroll'],
649 body=scroll_id, query_params={"scroll": '1m'})
650 scroll_id = result['_scroll_id']
651 if not result['hits']['hits']:
652 break
653 for doc in result['hits']['hits']:
654 es_docs_to_delete.append({"id": doc['_id'],
655 "index": doc['_index']})
656
657 for batch in in_batches(es_docs_to_delete, 10):
658 Datastores.es.bulk([Datastores.es.delete_op(doc_type='log',
659 **to_del)
660 for to_del in batch])
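The old side of this hunk hand-rolls a scan/scroll loop and bulk-deletes hits in batches of 10; the new side delegates to Elasticsearch's delete_by_query. A hedged sketch of the new call with the elasticsearch-py client (client construction and the resource id are illustrative, the arguments mirror the diff):

from elasticsearch import Elasticsearch

es = Elasticsearch()
es_query = {"query": {"bool": {"filter": [{"term": {"resource_id": 42}}]}}}
es.delete_by_query(
    index="rcae_l_*",      # every log partition for the app
    doc_type="log",
    body=es_query,
    conflicts="proceed",   # skip documents updated mid-delete instead of failing
)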
@@ -14,7 +14,6 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17
18 def filter_callable(structure, section=None):
17 def filter_callable(structure, section=None):
19 structure["SOMEVAL"] = "***REMOVED***"
18 structure['SOMEVAL'] = '***REMOVED***'
20 return structure
19 return structure
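A usage sketch for the scrub hook above (the call site is assumed): the hook receives a settings structure and hands it back with the sensitive key masked.

structure = {"SOMEVAL": "super-secret", "other": 1}
assert filter_callable(structure)["SOMEVAL"] == "***REMOVED***"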
@@ -21,8 +21,8 b' import pyramid.threadlocal'
21 import datetime
21 import datetime
22 import appenlight.lib.helpers as h
22 import appenlight.lib.helpers as h
23
23
24 from ziggurat_foundations.models.services.user import UserService
24 from appenlight.models.user import User
25 from ziggurat_foundations.models.services.group import GroupService
25 from appenlight.models.group import Group
26 from appenlight.models import DBSession
26 from appenlight.models import DBSession
27 from appenlight.models.alert_channel import AlertChannel
27 from appenlight.models.alert_channel import AlertChannel
28 from appenlight.models.integrations import IntegrationException
28 from appenlight.models.integrations import IntegrationException
@@ -43,7 +43,7 b' _ = str'
43 strip_filter = lambda x: x.strip() if x else None
43 strip_filter = lambda x: x.strip() if x else None
44 uppercase_filter = lambda x: x.upper() if x else None
44 uppercase_filter = lambda x: x.upper() if x else None
45
45
46 FALSE_VALUES = ("false", "", False, None)
46 FALSE_VALUES = ('false', '', False, None)
47
47
48
48
49 class CSRFException(Exception):
49 class CSRFException(Exception):
@@ -51,14 +51,11 b' class CSRFException(Exception):'
51
51
52
52
53 class ReactorForm(SecureForm):
53 class ReactorForm(SecureForm):
54 def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
54 def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None,
55 super(ReactorForm, self).__init__(
55 **kwargs):
56 formdata=formdata,
56 super(ReactorForm, self).__init__(formdata=formdata, obj=obj,
57 obj=obj,
57 prefix=prefix,
58 prefix=prefix,
58 csrf_context=csrf_context, **kwargs)
59 csrf_context=csrf_context,
60 **kwargs
61 )
62 self._csrf_context = csrf_context
59 self._csrf_context = csrf_context
63
60
64 def generate_csrf_token(self, csrf_context):
61 def generate_csrf_token(self, csrf_context):
@@ -66,14 +63,14 b' class ReactorForm(SecureForm):'
66
63
67 def validate_csrf_token(self, field):
64 def validate_csrf_token(self, field):
68 request = self._csrf_context or pyramid.threadlocal.get_current_request()
65 request = self._csrf_context or pyramid.threadlocal.get_current_request()
69 is_from_auth_token = "auth:auth_token" in request.effective_principals
66 is_from_auth_token = 'auth:auth_token' in request.effective_principals
70 if is_from_auth_token:
67 if is_from_auth_token:
71 return True
68 return True
72
69
73 if field.data != field.current_token:
70 if field.data != field.current_token:
74 # try to save the day by using token from angular
71 # try to save the day by using token from angular
75 if request.headers.get("X-XSRF-TOKEN") != field.current_token:
72 if request.headers.get('X-XSRF-TOKEN') != field.current_token:
76 raise CSRFException("Invalid CSRF token")
73 raise CSRFException('Invalid CSRF token')
77
74
78 @property
75 @property
79 def errors_dict(self):
76 def errors_dict(self):
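The validation above encodes three rules worth spelling out; a condensed sketch (the request shape is assumed):

def csrf_ok(request, field_data, current_token):
    # 1. requests authenticated by an API token skip CSRF entirely
    if "auth:auth_token" in request.effective_principals:
        return True
    # 2. the regular hidden form field may carry the token
    if field_data == current_token:
        return True
    # 3. otherwise fall back to the Angular-style X-XSRF-TOKEN header
    return request.headers.get("X-XSRF-TOKEN") == current_token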
@@ -108,47 +105,45 b' class ReactorForm(SecureForm):'
108
105
109 class SignInForm(ReactorForm):
106 class SignInForm(ReactorForm):
110 came_from = wtforms.HiddenField()
107 came_from = wtforms.HiddenField()
111 sign_in_user_name = wtforms.StringField(_("User Name"))
108 sign_in_user_name = wtforms.StringField(_('User Name'))
112 sign_in_user_password = wtforms.PasswordField(_("Password"))
109 sign_in_user_password = wtforms.PasswordField(_('Password'))
113
110
114 ignore_labels = ["submit"]
111 ignore_labels = ['submit']
115 css_classes = {"submit": "btn btn-primary"}
112 css_classes = {'submit': 'btn btn-primary'}
116
113
117 html_attrs = {
114 html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'},
118 "sign_in_user_name": {"placeholder": "Your login"},
115 'sign_in_user_password': {
119 "sign_in_user_password": {"placeholder": "Your password"},
116 'placeholder': 'Your password'}}
120 }
121
117
122
118
123 from wtforms.widgets import html_params, HTMLString
119 from wtforms.widgets import html_params, HTMLString
124
120
125
121
126 def select_multi_checkbox(field, ul_class="set", **kwargs):
122 def select_multi_checkbox(field, ul_class='set', **kwargs):
127 """Render a multi-checkbox widget"""
123 """Render a multi-checkbox widget"""
128 kwargs.setdefault("type", "checkbox")
124 kwargs.setdefault('type', 'checkbox')
129 field_id = kwargs.pop("id", field.id)
125 field_id = kwargs.pop('id', field.id)
130 html = ["<ul %s>" % html_params(id=field_id, class_=ul_class)]
126 html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)]
131 for value, label, checked in field.iter_choices():
127 for value, label, checked in field.iter_choices():
132 choice_id = "%s-%s" % (field_id, value)
128 choice_id = '%s-%s' % (field_id, value)
133 options = dict(kwargs, name=field.name, value=value, id=choice_id)
129 options = dict(kwargs, name=field.name, value=value, id=choice_id)
134 if checked:
130 if checked:
135 options["checked"] = "checked"
131 options['checked'] = 'checked'
136 html.append("<li><input %s /> " % html_params(**options))
132 html.append('<li><input %s /> ' % html_params(**options))
137 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
133 html.append('<label for="%s">%s</label></li>' % (choice_id, label))
138 html.append("</ul>")
134 html.append('</ul>')
139 return HTMLString("".join(html))
135 return HTMLString(''.join(html))
140
136
141
137
142 def button_widget(field, button_cls="ButtonField btn btn-default", **kwargs):
138 def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs):
143 """Render a button widget"""
139 """Render a button widget"""
144 kwargs.setdefault("type", "button")
140 kwargs.setdefault('type', 'button')
145 field_id = kwargs.pop("id", field.id)
141 field_id = kwargs.pop('id', field.id)
146 kwargs.setdefault("value", field.label.text)
142 kwargs.setdefault('value', field.label.text)
147 html = [
143 html = ['<button %s>%s</button>' % (html_params(id=field_id,
148 "<button %s>%s</button>"
144 class_=button_cls),
149 % (html_params(id=field_id, class_=button_cls), kwargs["value"])
145 kwargs['value'],)]
150 ]
146 return HTMLString(''.join(html))
151 return HTMLString("".join(html))
152
147
153
148
154 def clean_whitespace(value):
149 def clean_whitespace(value):
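Hooking either widget up is a one-liner on the field definition, as ReportBrowserForm does further down; a minimal usage sketch:

import wtforms

class ExampleFilterForm(wtforms.Form):
    applications = wtforms.SelectMultipleField(
        "Applications",
        choices=[("app-1", "App 1"), ("app-2", "App 2")],
        widget=select_multi_checkbox,  # renders the <ul> of checkboxes above
    )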
@@ -158,36 +153,37 b' def clean_whitespace(value):'
158
153
159
154
160 def found_username_validator(form, field):
155 def found_username_validator(form, field):
161 user = UserService.by_user_name(field.data)
156 user = User.by_user_name(field.data)
162 # sets user to recover in email validator
157 # sets user to recover in email validator
163 form.field_user = user
158 form.field_user = user
164 if not user:
159 if not user:
165 raise wtforms.ValidationError("This username does not exist")
160 raise wtforms.ValidationError('This username does not exist')
166
161
167
162
168 def found_username_email_validator(form, field):
163 def found_username_email_validator(form, field):
169 user = UserService.by_email(field.data)
164 user = User.by_email(field.data)
170 if not user:
165 if not user:
171 raise wtforms.ValidationError("Email is incorrect")
166 raise wtforms.ValidationError('Email is incorrect')
172
167
173
168
174 def unique_username_validator(form, field):
169 def unique_username_validator(form, field):
175 user = UserService.by_user_name(field.data)
170 user = User.by_user_name(field.data)
176 if user:
171 if user:
177 raise wtforms.ValidationError("This username already exists in system")
172 raise wtforms.ValidationError('This username already exists in system')
178
173
179
174
180 def unique_groupname_validator(form, field):
175 def unique_groupname_validator(form, field):
181 group = GroupService.by_group_name(field.data)
176 group = Group.by_group_name(field.data)
182 mod_group = getattr(form, "_modified_group", None)
177 mod_group = getattr(form, '_modified_group', None)
183 if group and (not mod_group or mod_group.id != group.id):
178 if group and (not mod_group or mod_group.id != group.id):
184 raise wtforms.ValidationError("This group name already exists in system")
179 raise wtforms.ValidationError(
180 'This group name already exists in system')
185
181
186
182
187 def unique_email_validator(form, field):
183 def unique_email_validator(form, field):
188 user = UserService.by_email(field.data)
184 user = User.by_email(field.data)
189 if user:
185 if user:
190 raise wtforms.ValidationError("This email already exists in system")
186 raise wtforms.ValidationError('This email already exists in system')
191
187
192
188
193 def email_validator(form, field):
189 def email_validator(form, field):
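All of the validators above follow the plain wtforms contract: a callable taking (form, field) that raises ValidationError on failure, wired in through a field's validators list. Sketch:

import wtforms

class ExampleForm(wtforms.Form):
    user_name = wtforms.StringField(
        "User Name",
        validators=[unique_username_validator, wtforms.validators.DataRequired()],
    )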
@@ -200,168 +196,145 b' def email_validator(form, field):'
200
196
201 def unique_alert_email_validator(form, field):
197 def unique_alert_email_validator(form, field):
202 q = DBSession.query(AlertChannel)
198 q = DBSession.query(AlertChannel)
203 q = q.filter(AlertChannel.channel_name == "email")
199 q = q.filter(AlertChannel.channel_name == 'email')
204 q = q.filter(AlertChannel.channel_value == field.data)
200 q = q.filter(AlertChannel.channel_value == field.data)
205 email = q.first()
201 email = q.first()
206 if email:
202 if email:
207 raise wtforms.ValidationError("This email already exists in alert system")
203 raise wtforms.ValidationError(
204 'This email already exists in alert system')
208
205
209
206
210 def blocked_email_validator(form, field):
207 def blocked_email_validator(form, field):
211 blocked_emails = [
208 blocked_emails = [
212 "goood-mail.org",
209 'goood-mail.org',
213 "shoeonlineblog.com",
210 'shoeonlineblog.com',
214 "louboutinemart.com",
211 'louboutinemart.com',
215 "guccibagshere.com",
212 'guccibagshere.com',
216 "nikeshoesoutletforsale.com",
213 'nikeshoesoutletforsale.com'
217 ]
214 ]
218 data = field.data or ""
215 data = field.data or ''
219 domain = data.split("@")[-1]
216 domain = data.split('@')[-1]
220 if domain in blocked_emails:
217 if domain in blocked_emails:
221 raise wtforms.ValidationError("Don't spam")
218 raise wtforms.ValidationError('Don\'t spam')
222
219
223
220
224 def old_password_validator(form, field):
221 def old_password_validator(form, field):
225 if not UserService.check_password(field.user, field.data or ""):
222 if not field.user.check_password(field.data or ''):
226 raise wtforms.ValidationError("You need to enter correct password")
223 raise wtforms.ValidationError('You need to enter correct password')
227
224
228
225
229 class UserRegisterForm(ReactorForm):
226 class UserRegisterForm(ReactorForm):
230 user_name = wtforms.StringField(
227 user_name = wtforms.StringField(
231 _("User Name"),
228 _('User Name'),
232 filters=[strip_filter],
229 filters=[strip_filter],
233 validators=[
230 validators=[
234 wtforms.validators.Length(min=2, max=30),
231 wtforms.validators.Length(min=2, max=30),
235 wtforms.validators.Regexp(
232 wtforms.validators.Regexp(
236 re.compile(r"^[\.\w-]+$", re.UNICODE), message="Invalid characters used"
233 re.compile(r'^[\.\w-]+$', re.UNICODE),
237 ),
234 message="Invalid characters used"),
238 unique_username_validator,
235 unique_username_validator,
239 wtforms.validators.DataRequired(),
236 wtforms.validators.DataRequired()
240 ],
237 ])
241 )
242
238
243 user_password = wtforms.PasswordField(
239 user_password = wtforms.PasswordField(_('User Password'),
244 _("User Password"),
240 filters=[strip_filter],
245 filters=[strip_filter],
241 validators=[
246 validators=[
242 wtforms.validators.Length(min=4),
247 wtforms.validators.Length(min=4),
243 wtforms.validators.DataRequired()
248 wtforms.validators.DataRequired(),
244 ])
249 ],
250 )
251
245
252 email = wtforms.StringField(
246 email = wtforms.StringField(_('Email Address'),
253 _("Email Address"),
247 filters=[strip_filter],
254 filters=[strip_filter],
248 validators=[email_validator,
255 validators=[
249 unique_email_validator,
256 email_validator,
250 blocked_email_validator,
257 unique_email_validator,
251 wtforms.validators.DataRequired()])
258 blocked_email_validator,
252 first_name = wtforms.HiddenField(_('First Name'))
259 wtforms.validators.DataRequired(),
253 last_name = wtforms.HiddenField(_('Last Name'))
260 ],
261 )
262 first_name = wtforms.HiddenField(_("First Name"))
263 last_name = wtforms.HiddenField(_("Last Name"))
264
254
265 ignore_labels = ["submit"]
255 ignore_labels = ['submit']
266 css_classes = {"submit": "btn btn-primary"}
256 css_classes = {'submit': 'btn btn-primary'}
267
257
268 html_attrs = {
258 html_attrs = {'user_name': {'placeholder': 'Your login'},
269 "user_name": {"placeholder": "Your login"},
259 'user_password': {'placeholder': 'Your password'},
270 "user_password": {"placeholder": "Your password"},
260 'email': {'placeholder': 'Your email'}}
271 "email": {"placeholder": "Your email"},
272 }
273
261
274
262
275 class UserCreateForm(UserRegisterForm):
263 class UserCreateForm(UserRegisterForm):
276 status = wtforms.BooleanField("User status", false_values=FALSE_VALUES)
264 status = wtforms.BooleanField('User status',
265 false_values=FALSE_VALUES)
277
266
278
267
279 class UserUpdateForm(UserCreateForm):
268 class UserUpdateForm(UserCreateForm):
280 user_name = None
269 user_name = None
281 user_password = wtforms.PasswordField(
270 user_password = wtforms.PasswordField(_('User Password'),
282 _("User Password"),
271 filters=[strip_filter],
283 filters=[strip_filter],
272 validators=[
284 validators=[wtforms.validators.Length(min=4), wtforms.validators.Optional()],
273 wtforms.validators.Length(min=4),
285 )
274 wtforms.validators.Optional()
286 email = wtforms.StringField(
275 ])
287 _("Email Address"),
276 email = wtforms.StringField(_('Email Address'),
288 filters=[strip_filter],
277 filters=[strip_filter],
289 validators=[email_validator, wtforms.validators.DataRequired()],
278 validators=[email_validator,
290 )
279 wtforms.validators.DataRequired()])
291
280
292
281
293 class LostPasswordForm(ReactorForm):
282 class LostPasswordForm(ReactorForm):
294 email = wtforms.StringField(
283 email = wtforms.StringField(_('Email Address'),
295 _("Email Address"),
284 filters=[strip_filter],
296 filters=[strip_filter],
285 validators=[email_validator,
297 validators=[
286 found_username_email_validator,
298 email_validator,
287 wtforms.validators.DataRequired()])
299 found_username_email_validator,
300 wtforms.validators.DataRequired(),
301 ],
302 )
303
288
304 submit = wtforms.SubmitField(_("Reset password"))
289 submit = wtforms.SubmitField(_('Reset password'))
305 ignore_labels = ["submit"]
290 ignore_labels = ['submit']
306 css_classes = {"submit": "btn btn-primary"}
291 css_classes = {'submit': 'btn btn-primary'}
307
292
308
293
309 class ChangePasswordForm(ReactorForm):
294 class ChangePasswordForm(ReactorForm):
310 old_password = wtforms.PasswordField(
295 old_password = wtforms.PasswordField(
311 "Old Password",
296 'Old Password',
312 filters=[strip_filter],
297 filters=[strip_filter],
313 validators=[old_password_validator, wtforms.validators.DataRequired()],
298 validators=[old_password_validator,
314 )
299 wtforms.validators.DataRequired()])
315
300
316 new_password = wtforms.PasswordField(
301 new_password = wtforms.PasswordField(
317 "New Password",
302 'New Password',
318 filters=[strip_filter],
303 filters=[strip_filter],
319 validators=[
304 validators=[wtforms.validators.Length(min=4),
320 wtforms.validators.Length(min=4),
305 wtforms.validators.DataRequired()])
321 wtforms.validators.DataRequired(),
322 ],
323 )
324 new_password_confirm = wtforms.PasswordField(
306 new_password_confirm = wtforms.PasswordField(
325 "Confirm Password",
307 'Confirm Password',
326 filters=[strip_filter],
308 filters=[strip_filter],
327 validators=[
309 validators=[wtforms.validators.EqualTo('new_password'),
328 wtforms.validators.EqualTo("new_password"),
310 wtforms.validators.DataRequired()])
329 wtforms.validators.DataRequired(),
311 submit = wtforms.SubmitField('Change Password')
330 ],
312 ignore_labels = ['submit']
331 )
313 css_classes = {'submit': 'btn btn-primary'}
332 submit = wtforms.SubmitField("Change Password")
333 ignore_labels = ["submit"]
334 css_classes = {"submit": "btn btn-primary"}
335
314
336
315
337 class CheckPasswordForm(ReactorForm):
316 class CheckPasswordForm(ReactorForm):
338 password = wtforms.PasswordField(
317 password = wtforms.PasswordField(
339 "Password",
318 'Password',
340 filters=[strip_filter],
319 filters=[strip_filter],
341 validators=[old_password_validator, wtforms.validators.DataRequired()],
320 validators=[old_password_validator,
342 )
321 wtforms.validators.DataRequired()])
343
322
344
323
345 class NewPasswordForm(ReactorForm):
324 class NewPasswordForm(ReactorForm):
346 new_password = wtforms.PasswordField(
325 new_password = wtforms.PasswordField(
347 "New Password",
326 'New Password',
348 filters=[strip_filter],
327 filters=[strip_filter],
349 validators=[
328 validators=[wtforms.validators.Length(min=4),
350 wtforms.validators.Length(min=4),
329 wtforms.validators.DataRequired()])
351 wtforms.validators.DataRequired(),
352 ],
353 )
354 new_password_confirm = wtforms.PasswordField(
330 new_password_confirm = wtforms.PasswordField(
355 "Confirm Password",
331 'Confirm Password',
356 filters=[strip_filter],
332 filters=[strip_filter],
357 validators=[
333 validators=[wtforms.validators.EqualTo('new_password'),
358 wtforms.validators.EqualTo("new_password"),
334 wtforms.validators.DataRequired()])
359 wtforms.validators.DataRequired(),
335 submit = wtforms.SubmitField('Set Password')
360 ],
336 ignore_labels = ['submit']
361 )
337 css_classes = {'submit': 'btn btn-primary'}
362 submit = wtforms.SubmitField("Set Password")
363 ignore_labels = ["submit"]
364 css_classes = {"submit": "btn btn-primary"}
365
338
366
339
367 class CORSTextAreaField(wtforms.StringField):
340 class CORSTextAreaField(wtforms.StringField):
@@ -369,290 +342,261 b' class CORSTextAreaField(wtforms.StringField):'
369 This field represents an HTML ``<textarea>`` and can be used to take
342 This field represents an HTML ``<textarea>`` and can be used to take
370 multi-line input.
343 multi-line input.
371 """
344 """
372
373 widget = wtforms.widgets.TextArea()
345 widget = wtforms.widgets.TextArea()
374
346
375 def process_formdata(self, valuelist):
347 def process_formdata(self, valuelist):
376 self.data = []
348 self.data = []
377 if valuelist:
349 if valuelist:
378 data = [x.strip() for x in valuelist[0].split("\n")]
350 data = [x.strip() for x in valuelist[0].split('\n')]
379 for d in data:
351 for d in data:
380 if not d:
352 if not d:
381 continue
353 continue
382 if d.startswith("www."):
354 if d.startswith('www.'):
383 d = d[4:]
355 d = d[4:]
384 if data:
356 if data:
385 self.data.append(d)
357 self.data.append(d)
386 else:
358 else:
387 self.data = []
359 self.data = []
388 self.data = "\n".join(self.data)
360 self.data = '\n'.join(self.data)
389
361
390
362
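What process_formdata above boils down to, as a worked example (the input string is illustrative): split the textarea on newlines, strip whitespace, drop blanks, trim a leading www., and re-join.

raw = "www.example.com\n\n  api.example.com  \n"
cleaned = "\n".join(
    d[4:] if d.startswith("www.") else d
    for d in (line.strip() for line in raw.split("\n"))
    if d
)
assert cleaned == "example.com\napi.example.com"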
391 class ApplicationCreateForm(ReactorForm):
363 class ApplicationCreateForm(ReactorForm):
392 resource_name = wtforms.StringField(
364 resource_name = wtforms.StringField(
393 _("Application name"),
365 _('Application name'),
394 filters=[strip_filter],
366 filters=[strip_filter],
395 validators=[
367 validators=[wtforms.validators.Length(min=1),
396 wtforms.validators.Length(min=1),
368 wtforms.validators.DataRequired()])
397 wtforms.validators.DataRequired(),
398 ],
399 )
400
369
401 domains = CORSTextAreaField(
370 domains = CORSTextAreaField(
402 _("Domain names for CORS headers "),
371 _('Domain names for CORS headers '),
403 validators=[wtforms.validators.Length(min=1), wtforms.validators.Optional()],
372 validators=[wtforms.validators.Length(min=1),
404 description="Required for Javascript error "
373 wtforms.validators.Optional()],
405 "tracking (one line one domain, skip http:// part)",
374 description='Required for Javascript error '
406 )
375 'tracking (one line one domain, skip http:// part)')
407
376
408 submit = wtforms.SubmitField(_("Create Application"))
377 submit = wtforms.SubmitField(_('Create Application'))
409
378
410 ignore_labels = ["submit"]
379 ignore_labels = ['submit']
411 css_classes = {"submit": "btn btn-primary"}
380 css_classes = {'submit': 'btn btn-primary'}
412 html_attrs = {
381 html_attrs = {'resource_name': {'placeholder': 'Application Name'},
413 "resource_name": {"placeholder": "Application Name"},
382 'uptime_url': {'placeholder': 'http://somedomain.com'}}
414 "uptime_url": {"placeholder": "http://somedomain.com"},
415 }
416
383
417
384
418 class ApplicationUpdateForm(ApplicationCreateForm):
385 class ApplicationUpdateForm(ApplicationCreateForm):
419 default_grouping = wtforms.SelectField(
386 default_grouping = wtforms.SelectField(
420 _("Default grouping for errors"),
387 _('Default grouping for errors'),
421 choices=[
388 choices=[('url_type', 'Error Type + location',),
422 ("url_type", "Error Type + location"),
389 ('url_traceback', 'Traceback + location',),
423 ("url_traceback", "Traceback + location"),
390 ('traceback_server', 'Traceback + Server',)],
424 ("traceback_server", "Traceback + Server"),
391 default='url_traceback')
425 ],
426 default="url_traceback",
427 )
428
392
429 error_report_threshold = wtforms.IntegerField(
393 error_report_threshold = wtforms.IntegerField(
430 _("Alert on error reports"),
394 _('Alert on error reports'),
431 validators=[
395 validators=[
432 wtforms.validators.NumberRange(min=1),
396 wtforms.validators.NumberRange(min=1),
433 wtforms.validators.DataRequired(),
397 wtforms.validators.DataRequired()
434 ],
398 ],
435 description="Application must send at least this many "
399 description='Application must send at least this many '
436 "error reports per minute to open an alert",
400 'error reports per minute to open an alert'
437 )
401 )
438
402
439 slow_report_threshold = wtforms.IntegerField(
403 slow_report_threshold = wtforms.IntegerField(
440 _("Alert on slow reports"),
404 _('Alert on slow reports'),
441 validators=[
405 validators=[wtforms.validators.NumberRange(min=1),
442 wtforms.validators.NumberRange(min=1),
406 wtforms.validators.DataRequired()],
443 wtforms.validators.DataRequired(),
407 description='Application must send at least this many '
444 ],
408 'slow reports per minute to open an alert')
445 description="Application must send at least this many "
446 "slow reports per minute to open an alert",
447 )
448
409
449 allow_permanent_storage = wtforms.BooleanField(
410 allow_permanent_storage = wtforms.BooleanField(
450 _("Permanent logs"),
411 _('Permanent logs'),
451 false_values=FALSE_VALUES,
412 false_values=FALSE_VALUES,
452 description=_("Allow permanent storage of logs in separate DB partitions"),
413 description=_(
453 )
414 'Allow permanent storage of logs in separate DB partitions'))
454
415
455 submit = wtforms.SubmitField(_("Create Application"))
416 submit = wtforms.SubmitField(_('Create Application'))
456
417
457
418
458 class UserSearchSchemaForm(ReactorForm):
419 class UserSearchSchemaForm(ReactorForm):
459 user_name = wtforms.StringField("User Name", filters=[strip_filter])
420 user_name = wtforms.StringField('User Name',
421 filters=[strip_filter], )
460
422
461 submit = wtforms.SubmitField(_("Search User"))
423 submit = wtforms.SubmitField(_('Search User'))
462 ignore_labels = ["submit"]
424 ignore_labels = ['submit']
463 css_classes = {"submit": "btn btn-primary"}
425 css_classes = {'submit': 'btn btn-primary'}
464
426
465 '<li class="user_exists"><span></span></li>'
427 '<li class="user_exists"><span></span></li>'
466
428
467
429
468 class YesNoForm(ReactorForm):
430 class YesNoForm(ReactorForm):
469 no = wtforms.SubmitField("No", default="")
431 no = wtforms.SubmitField('No', default='')
470 yes = wtforms.SubmitField("Yes", default="")
432 yes = wtforms.SubmitField('Yes', default='')
471 ignore_labels = ["submit"]
433 ignore_labels = ['submit']
472 css_classes = {"submit": "btn btn-primary"}
434 css_classes = {'submit': 'btn btn-primary'}
473
435
474
436
475 status_codes = [("", "All"), ("500", "500"), ("404", "404")]
437 status_codes = [('', 'All',), ('500', '500',), ('404', '404',)]
476
438
477 priorities = [("", "All")]
439 priorities = [('', 'All',)]
478 for i in range(1, 11):
440 for i in range(1, 11):
479 priorities.append((str(i), str(i)))
441 priorities.append((str(i), str(i),))
480
442
481 report_status_choices = [
443 report_status_choices = [('', 'All',),
482 ("", "All"),
444 ('never_reviewed', 'Never reviewed',),
483 ("never_reviewed", "Never reviewed"),
445 ('reviewed', 'Reviewed',),
484 ("reviewed", "Reviewed"),
446 ('public', 'Public',),
485 ("public", "Public"),
447 ('fixed', 'Fixed',), ]
486 ("fixed", "Fixed"),
487 ]
488
448
489
449
490 class ReportBrowserForm(ReactorForm):
450 class ReportBrowserForm(ReactorForm):
491 applications = wtforms.SelectMultipleField(
451 applications = wtforms.SelectMultipleField('Applications',
492 "Applications", widget=select_multi_checkbox
452 widget=select_multi_checkbox)
493 )
453 http_status = wtforms.SelectField('HTTP Status', choices=status_codes)
494 http_status = wtforms.SelectField("HTTP Status", choices=status_codes)
454 priority = wtforms.SelectField('Priority', choices=priorities, default='')
495 priority = wtforms.SelectField("Priority", choices=priorities, default="")
455 start_date = wtforms.DateField('Start Date')
496 start_date = wtforms.DateField("Start Date")
456 end_date = wtforms.DateField('End Date')
497 end_date = wtforms.DateField("End Date")
457 error = wtforms.StringField('Error')
498 error = wtforms.StringField("Error")
458 url_path = wtforms.StringField('URL Path')
499 url_path = wtforms.StringField("URL Path")
459 url_domain = wtforms.StringField('URL Domain')
500 url_domain = wtforms.StringField("URL Domain")
460 report_status = wtforms.SelectField('Report status',
501 report_status = wtforms.SelectField(
461 choices=report_status_choices,
502 "Report status", choices=report_status_choices, default=""
462 default='')
503 )
463 submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">'
504 submit = wtforms.SubmitField(
464 '</span> Filter results',
505 '<span class="glyphicon glyphicon-search">' "</span> Filter results",
465 widget=button_widget)
506 widget=button_widget,
466
507 )
467 ignore_labels = ['submit']
508
468 css_classes = {'submit': 'btn btn-primary'}
509 ignore_labels = ["submit"]
469
510 css_classes = {"submit": "btn btn-primary"}
470
511
471 slow_report_status_choices = [('', 'All',),
512
472 ('never_reviewed', 'Never reviewed',),
513 slow_report_status_choices = [
473 ('reviewed', 'Reviewed',),
514 ("", "All"),
474 ('public', 'Public',), ]
515 ("never_reviewed", "Never reviewed"),
516 ("reviewed", "Reviewed"),
517 ("public", "Public"),
518 ]
519
475
520
476
521 class BulkOperationForm(ReactorForm):
477 class BulkOperationForm(ReactorForm):
522 applications = wtforms.SelectField("Applications")
478 applications = wtforms.SelectField('Applications')
523 start_date = wtforms.DateField(
479 start_date = wtforms.DateField(
524 "Start Date",
480 'Start Date',
525 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(days=90),
481 default=lambda: datetime.datetime.utcnow() - datetime.timedelta(
526 )
482 days=90))
527 end_date = wtforms.DateField("End Date")
483 end_date = wtforms.DateField('End Date')
528 confirm = wtforms.BooleanField(
484 confirm = wtforms.BooleanField(
529 "Confirm operation", validators=[wtforms.validators.DataRequired()]
485 'Confirm operation',
530 )
486 validators=[wtforms.validators.DataRequired()])
531
487
532
488
533 class LogBrowserForm(ReactorForm):
489 class LogBrowserForm(ReactorForm):
534 applications = wtforms.SelectMultipleField(
490 applications = wtforms.SelectMultipleField('Applications',
535 "Applications", widget=select_multi_checkbox
491 widget=select_multi_checkbox)
536 )
492 start_date = wtforms.DateField('Start Date')
537 start_date = wtforms.DateField("Start Date")
493 log_level = wtforms.StringField('Log level')
538 log_level = wtforms.StringField("Log level")
494 message = wtforms.StringField('Message')
539 message = wtforms.StringField("Message")
495 namespace = wtforms.StringField('Namespace')
540 namespace = wtforms.StringField("Namespace")
541 submit = wtforms.SubmitField(
496 submit = wtforms.SubmitField(
542 '<span class="glyphicon glyphicon-search"></span> Filter results',
497 '<span class="glyphicon glyphicon-search"></span> Filter results',
543 widget=button_widget,
498 widget=button_widget)
544 )
499 ignore_labels = ['submit']
545 ignore_labels = ["submit"]
500 css_classes = {'submit': 'btn btn-primary'}
546 css_classes = {"submit": "btn btn-primary"}
547
501
548
502
549 class CommentForm(ReactorForm):
503 class CommentForm(ReactorForm):
550 body = wtforms.TextAreaField(
504 body = wtforms.TextAreaField('Comment', validators=[
551 "Comment",
505 wtforms.validators.Length(min=1),
552 validators=[
506 wtforms.validators.DataRequired()
553 wtforms.validators.Length(min=1),
507 ])
554 wtforms.validators.DataRequired(),
508 submit = wtforms.SubmitField('Comment', )
555 ],
509 ignore_labels = ['submit']
556 )
510 css_classes = {'submit': 'btn btn-primary'}
557 submit = wtforms.SubmitField("Comment")
558 ignore_labels = ["submit"]
559 css_classes = {"submit": "btn btn-primary"}
560
511
561
512
562 class EmailChannelCreateForm(ReactorForm):
513 class EmailChannelCreateForm(ReactorForm):
563 email = wtforms.StringField(
514 email = wtforms.StringField(_('Email Address'),
564 _("Email Address"),
515 filters=[strip_filter],
565 filters=[strip_filter],
516 validators=[email_validator,
566 validators=[
517 unique_alert_email_validator,
567 email_validator,
518 wtforms.validators.DataRequired()])
568 unique_alert_email_validator,
519 submit = wtforms.SubmitField('Add email channel', )
569 wtforms.validators.DataRequired(),
520 ignore_labels = ['submit']
570 ],
521 css_classes = {'submit': 'btn btn-primary'}
571 )
572 submit = wtforms.SubmitField("Add email channel")
573 ignore_labels = ["submit"]
574 css_classes = {"submit": "btn btn-primary"}
575
522
576
523
577 def gen_user_profile_form():
524 def gen_user_profile_form():
578 class UserProfileForm(ReactorForm):
525 class UserProfileForm(ReactorForm):
579 email = wtforms.StringField(
526 email = wtforms.StringField(
580 _("Email Address"),
527 _('Email Address'),
581 validators=[email_validator, wtforms.validators.DataRequired()],
528 validators=[email_validator, wtforms.validators.DataRequired()])
582 )
529 first_name = wtforms.StringField(_('First Name'))
583 first_name = wtforms.StringField(_("First Name"))
530 last_name = wtforms.StringField(_('Last Name'))
584 last_name = wtforms.StringField(_("Last Name"))
531 company_name = wtforms.StringField(_('Company Name'))
585 company_name = wtforms.StringField(_("Company Name"))
532 company_address = wtforms.TextAreaField(_('Company Address'))
586 company_address = wtforms.TextAreaField(_("Company Address"))
533 zip_code = wtforms.StringField(_('ZIP code'))
587 zip_code = wtforms.StringField(_("ZIP code"))
534 city = wtforms.StringField(_('City'))
588 city = wtforms.StringField(_("City"))
535 notifications = wtforms.BooleanField('Account notifications',
589 notifications = wtforms.BooleanField(
536 false_values=FALSE_VALUES)
590 "Account notifications", false_values=FALSE_VALUES
537 submit = wtforms.SubmitField(_('Update Account'))
591 )
538 ignore_labels = ['submit']
592 submit = wtforms.SubmitField(_("Update Account"))
539 css_classes = {'submit': 'btn btn-primary'}
593 ignore_labels = ["submit"]
594 css_classes = {"submit": "btn btn-primary"}
595
540
596 return UserProfileForm
541 return UserProfileForm
597
542
598
543
599 class PurgeAppForm(ReactorForm):
544 class PurgeAppForm(ReactorForm):
600 resource_id = wtforms.HiddenField(
545 resource_id = wtforms.HiddenField(
601 "App Id", validators=[wtforms.validators.DataRequired()]
546 'App Id',
602 )
547 validators=[wtforms.validators.DataRequired()])
603 days = wtforms.IntegerField("Days", validators=[wtforms.validators.DataRequired()])
548 days = wtforms.IntegerField(
549 'Days',
550 validators=[wtforms.validators.DataRequired()])
604 password = wtforms.PasswordField(
551 password = wtforms.PasswordField(
605 "Admin Password",
552 'Admin Password',
606 validators=[old_password_validator, wtforms.validators.DataRequired()],
553 validators=[old_password_validator, wtforms.validators.DataRequired()])
607 )
554 submit = wtforms.SubmitField(_('Purge Data'))
608 submit = wtforms.SubmitField(_("Purge Data"))
555 ignore_labels = ['submit']
609 ignore_labels = ["submit"]
556 css_classes = {'submit': 'btn btn-primary'}
610 css_classes = {"submit": "btn btn-primary"}
611
557
612
558
613 class IntegrationRepoForm(ReactorForm):
559 class IntegrationRepoForm(ReactorForm):
614 host_name = wtforms.StringField("Service Host", default="")
560 host_name = wtforms.StringField("Service Host", default='')
615 user_name = wtforms.StringField(
561 user_name = wtforms.StringField(
616 "User Name",
562 "User Name",
617 filters=[strip_filter],
563 filters=[strip_filter],
618 validators=[
564 validators=[wtforms.validators.DataRequired(),
619 wtforms.validators.DataRequired(),
565 wtforms.validators.Length(min=1)])
620 wtforms.validators.Length(min=1),
621 ],
622 )
623 repo_name = wtforms.StringField(
566 repo_name = wtforms.StringField(
624 "Repo Name",
567 "Repo Name",
625 filters=[strip_filter],
568 filters=[strip_filter],
626 validators=[
569 validators=[wtforms.validators.DataRequired(),
627 wtforms.validators.DataRequired(),
570 wtforms.validators.Length(min=1)])
628 wtforms.validators.Length(min=1),
629 ],
630 )
631
571
632
572
633 class IntegrationBitbucketForm(IntegrationRepoForm):
573 class IntegrationBitbucketForm(IntegrationRepoForm):
634 host_name = wtforms.StringField("Service Host", default="https://bitbucket.org")
574 host_name = wtforms.StringField("Service Host",
575 default='https://bitbucket.org')
635
576
636 def validate_user_name(self, field):
577 def validate_user_name(self, field):
637 try:
578 try:
638 request = pyramid.threadlocal.get_current_request()
579 request = pyramid.threadlocal.get_current_request()
639 client = BitbucketIntegration.create_client(
580 client = BitbucketIntegration.create_client(
640 request, self.user_name.data, self.repo_name.data
581 request,
641 )
582 self.user_name.data,
583 self.repo_name.data)
642 client.get_assignees()
584 client.get_assignees()
643 except IntegrationException as e:
585 except IntegrationException as e:
644 raise wtforms.validators.ValidationError(str(e))
586 raise wtforms.validators.ValidationError(str(e))
645
587
646
588
647 class IntegrationGithubForm(IntegrationRepoForm):
589 class IntegrationGithubForm(IntegrationRepoForm):
648 host_name = wtforms.StringField("Service Host", default="https://github.com")
590 host_name = wtforms.StringField("Service Host",
591 default='https://github.com')
649
592
650 def validate_user_name(self, field):
593 def validate_user_name(self, field):
651 try:
594 try:
652 request = pyramid.threadlocal.get_current_request()
595 request = pyramid.threadlocal.get_current_request()
653 client = GithubIntegration.create_client(
596 client = GithubIntegration.create_client(
654 request, self.user_name.data, self.repo_name.data
597 request,
655 )
598 self.user_name.data,
599 self.repo_name.data)
656 client.get_assignees()
600 client.get_assignees()
657 except IntegrationException as e:
601 except IntegrationException as e:
658 raise wtforms.validators.ValidationError(str(e))
602 raise wtforms.validators.ValidationError(str(e))
@@ -661,28 +605,25 b' class IntegrationGithubForm(IntegrationRepoForm):'
661
605
662 def filter_rooms(data):
606 def filter_rooms(data):
663 if data is not None:
607 if data is not None:
664 rooms = data.split(",")
608 rooms = data.split(',')
665 return ",".join([r.strip() for r in rooms])
609 return ','.join([r.strip() for r in rooms])
666
610
667
611
668 class IntegrationCampfireForm(ReactorForm):
612 class IntegrationCampfireForm(ReactorForm):
669 account = wtforms.StringField(
613 account = wtforms.StringField(
670 "Account",
614 'Account',
671 filters=[strip_filter],
615 filters=[strip_filter],
672 validators=[wtforms.validators.DataRequired()],
616 validators=[wtforms.validators.DataRequired()])
673 )
674 api_token = wtforms.StringField(
617 api_token = wtforms.StringField(
675 "Api Token",
618 'Api Token',
676 filters=[strip_filter],
619 filters=[strip_filter],
677 validators=[wtforms.validators.DataRequired()],
620 validators=[wtforms.validators.DataRequired()])
678 )
621 rooms = wtforms.StringField('Room ID list', filters=[filter_rooms])
679 rooms = wtforms.StringField("Room ID list", filters=[filter_rooms])
680
622
681 def validate_api_token(self, field):
623 def validate_api_token(self, field):
682 try:
624 try:
683 client = CampfireIntegration.create_client(
625 client = CampfireIntegration.create_client(self.api_token.data,
684 self.api_token.data, self.account.data
626 self.account.data)
685 )
686 client.get_account()
627 client.get_account()
687 except IntegrationException as e:
628 except IntegrationException as e:
688 raise wtforms.validators.ValidationError(str(e))
629 raise wtforms.validators.ValidationError(str(e))
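filter_rooms above just normalizes a comma-separated id list; its behavior in two lines:

assert filter_rooms(" 1, 2 ,3 ") == "1,2,3"
assert filter_rooms(None) is None  # wtforms filters pass None through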
@@ -690,18 +631,17 b' class IntegrationCampfireForm(ReactorForm):'
690 def validate_rooms(self, field):
631 def validate_rooms(self, field):
691 if not field.data:
632 if not field.data:
692 return
633 return
693 client = CampfireIntegration.create_client(
634 client = CampfireIntegration.create_client(self.api_token.data,
694 self.api_token.data, self.account.data
635 self.account.data)
695 )
696
636
697 try:
637 try:
698 room_list = [r["id"] for r in client.get_rooms()]
638 room_list = [r['id'] for r in client.get_rooms()]
699 except IntegrationException as e:
639 except IntegrationException as e:
700 raise wtforms.validators.ValidationError(str(e))
640 raise wtforms.validators.ValidationError(str(e))
701
641
702 rooms = field.data.split(",")
642 rooms = field.data.split(',')
703 if len(rooms) > 3:
643 if len(rooms) > 3:
704 msg = "You can use up to 3 room ids"
644 msg = 'You can use up to 3 room ids'
705 raise wtforms.validators.ValidationError(msg)
645 raise wtforms.validators.ValidationError(msg)
706 if rooms:
646 if rooms:
707 for room_id in rooms:
647 for room_id in rooms:
@@ -709,78 +649,75 b' class IntegrationCampfireForm(ReactorForm):'
709 msg = "Room %s doesn't exist"
649 msg = "Room %s doesn't exist"
710 raise wtforms.validators.ValidationError(msg % room_id)
650 raise wtforms.validators.ValidationError(msg % room_id)
711 if not room_id.strip().isdigit():
651 if not room_id.strip().isdigit():
712 msg = "You must use only integers for room ids"
652 msg = 'You must use only integers for room ids'
713 raise wtforms.validators.ValidationError(msg)
653 raise wtforms.validators.ValidationError(msg)
714
654
715 submit = wtforms.SubmitField(_("Connect to Campfire"))
655 submit = wtforms.SubmitField(_('Connect to Campfire'))
716 ignore_labels = ["submit"]
656 ignore_labels = ['submit']
717 css_classes = {"submit": "btn btn-primary"}
657 css_classes = {'submit': 'btn btn-primary'}
718
658
719
659
720 def filter_rooms(data):
660 def filter_rooms(data):
721 if data is not None:
661 if data is not None:
722 rooms = data.split(",")
662 rooms = data.split(',')
723 return ",".join([r.strip() for r in rooms])
663 return ','.join([r.strip() for r in rooms])
724
664
725
665
726 class IntegrationHipchatForm(ReactorForm):
666 class IntegrationHipchatForm(ReactorForm):
727 api_token = wtforms.StringField(
667 api_token = wtforms.StringField(
728 "Api Token",
668 'Api Token',
729 filters=[strip_filter],
669 filters=[strip_filter],
730 validators=[wtforms.validators.DataRequired()],
670 validators=[wtforms.validators.DataRequired()])
731 )
732 rooms = wtforms.StringField(
671 rooms = wtforms.StringField(
733 "Room ID list",
672 'Room ID list',
734 filters=[filter_rooms],
673 filters=[filter_rooms],
735 validators=[wtforms.validators.DataRequired()],
674 validators=[wtforms.validators.DataRequired()])
736 )
737
675
738 def validate_rooms(self, field):
676 def validate_rooms(self, field):
739 if not field.data:
677 if not field.data:
740 return
678 return
741 client = HipchatIntegration.create_client(self.api_token.data)
679 client = HipchatIntegration.create_client(self.api_token.data)
742 rooms = field.data.split(",")
680 rooms = field.data.split(',')
743 if len(rooms) > 3:
681 if len(rooms) > 3:
744 msg = "You can use up to 3 room ids"
682 msg = 'You can use up to 3 room ids'
745 raise wtforms.validators.ValidationError(msg)
683 raise wtforms.validators.ValidationError(msg)
746 if rooms:
684 if rooms:
747 for room_id in rooms:
685 for room_id in rooms:
748 if not room_id.strip().isdigit():
686 if not room_id.strip().isdigit():
749 msg = "You must use only integers for room ids"
687 msg = 'You must use only integers for room ids'
750 raise wtforms.validators.ValidationError(msg)
688 raise wtforms.validators.ValidationError(msg)
751 try:
689 try:
752 client.send(
690 client.send({
753 {
691 "message_format": 'text',
754 "message_format": "text",
692 "message": "testing for room existence",
755 "message": "testing for room existence",
693 "from": "AppEnlight",
756 "from": "AppEnlight",
694 "room_id": room_id,
757 "room_id": room_id,
695 "color": "green"
758 "color": "green",
696 })
759 }
760 )
761 except IntegrationException as exc:
697 except IntegrationException as exc:
762 msg = "Room id: %s exception: %s"
698 msg = 'Room id: %s exception: %s'
763 raise wtforms.validators.ValidationError(msg % (room_id, exc))
699 raise wtforms.validators.ValidationError(msg % (room_id,
700 exc))
764
701
765
702
766 class IntegrationFlowdockForm(ReactorForm):
703 class IntegrationFlowdockForm(ReactorForm):
767 api_token = wtforms.StringField(
704 api_token = wtforms.StringField('API Token',
768 "API Token",
705 filters=[strip_filter],
769 filters=[strip_filter],
706 validators=[
770 validators=[wtforms.validators.DataRequired()],
707 wtforms.validators.DataRequired()
771 )
708 ], )
772
709
773 def validate_api_token(self, field):
710 def validate_api_token(self, field):
774 try:
711 try:
775 client = FlowdockIntegration.create_client(self.api_token.data)
712 client = FlowdockIntegration.create_client(self.api_token.data)
776 registry = pyramid.threadlocal.get_current_registry()
713 registry = pyramid.threadlocal.get_current_registry()
777 payload = {
714 payload = {
778 "source": registry.settings["mailing.from_name"],
715 "source": registry.settings['mailing.from_name'],
779 "from_address": registry.settings["mailing.from_email"],
716 "from_address": registry.settings['mailing.from_email'],
780 "subject": "Integration test",
717 "subject": "Integration test",
781 "content": "If you can see this it was successful",
718 "content": "If you can see this it was successful",
782 "tags": ["appenlight"],
719 "tags": ["appenlight"],
783 "link": registry.settings["mailing.app_url"],
720 "link": registry.settings['mailing.app_url']
784 }
721 }
785 client.send_to_inbox(payload)
722 client.send_to_inbox(payload)
786 except IntegrationException as e:
723 except IntegrationException as e:
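Hipchat and Flowdock validate the same way the repo forms do, except the probe actually sends a throwaway message; a failed send marks the field invalid. The Hipchat probe payload from the diff, with an illustrative room id:

probe = {
    "message_format": "text",
    "message": "testing for room existence",
    "from": "AppEnlight",
    "room_id": "12345",  # each comma-separated id gets probed in turn
    "color": "green",
}
# client.send(probe) raises IntegrationException when the room is bad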
@@ -789,35 +726,30 b' class IntegrationFlowdockForm(ReactorForm):'
789
726
790 class IntegrationSlackForm(ReactorForm):
727 class IntegrationSlackForm(ReactorForm):
791 webhook_url = wtforms.StringField(
728 webhook_url = wtforms.StringField(
792 "Reports webhook",
729 'Reports webhook',
793 filters=[strip_filter],
730 filters=[strip_filter],
794 validators=[wtforms.validators.DataRequired()],
731 validators=[wtforms.validators.DataRequired()])
795 )
796
732
797 def validate_webhook_url(self, field):
733 def validate_webhook_url(self, field):
798 registry = pyramid.threadlocal.get_current_registry()
734 registry = pyramid.threadlocal.get_current_registry()
799 client = SlackIntegration.create_client(field.data)
735 client = SlackIntegration.create_client(field.data)
800 link = "<%s|%s>" % (
736 link = "<%s|%s>" % (registry.settings['mailing.app_url'],
801 registry.settings["mailing.app_url"],
737 registry.settings['mailing.from_name'])
802 registry.settings["mailing.from_name"],
803 )
804 test_data = {
738 test_data = {
805 "username": "AppEnlight",
739 "username": "AppEnlight",
806 "icon_emoji": ":fire:",
740 "icon_emoji": ":fire:",
807 "attachments": [
741 "attachments": [
808 {
742 {"fallback": "Testing integration channel: %s" % link,
809 "fallback": "Testing integration channel: %s" % link,
743 "pretext": "Testing integration channel: %s" % link,
810 "pretext": "Testing integration channel: %s" % link,
744 "color": "good",
811 "color": "good",
745 "fields": [
812 "fields": [
746 {
813 {
747 "title": "Status",
814 "title": "Status",
748 "value": "Integration is working fine",
815 "value": "Integration is working fine",
749 "short": False
816 "short": False,
750 }
817 }
751 ]}
818 ],
752 ]
819 }
820 ],
821 }
753 }
822 try:
754 try:
823 client.make_request(data=test_data)
755 client.make_request(data=test_data)
@@ -827,52 +759,44 @@ class IntegrationSlackForm(ReactorForm):

class IntegrationWebhooksForm(ReactorForm):
    reports_webhook = wtforms.StringField(
        'Reports webhook',
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()])
    alerts_webhook = wtforms.StringField(
        'Alerts webhook',
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()])
    submit = wtforms.SubmitField(_('Setup webhooks'))
    ignore_labels = ['submit']
    css_classes = {'submit': 'btn btn-primary'}


class IntegrationJiraForm(ReactorForm):
    host_name = wtforms.StringField(
        'Server URL',
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()])
    user_name = wtforms.StringField(
        'Username',
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()])
    password = wtforms.PasswordField(
        'Password',
        filters=[strip_filter],
        validators=[wtforms.validators.DataRequired()])
    project = wtforms.StringField(
        'Project key',
        filters=[uppercase_filter, strip_filter],
        validators=[wtforms.validators.DataRequired()])

    def validate_project(self, field):
        if not field.data:
            return
        try:
            client = JiraClient(self.user_name.data,
                                self.password.data,
                                self.host_name.data,
                                self.project.data)
        except Exception as exc:
            raise wtforms.validators.ValidationError(str(exc))

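Aside: a minimal sketch of driving one of these forms outside a request, assuming plain wtforms semantics (ReactorForm's own constructor extras are not shown in this diff, so treat the call as illustrative):

    from webob.multidict import MultiDict

    form = IntegrationWebhooksForm(MultiDict({
        'reports_webhook': 'https://hooks.example.com/reports',  # made-up URL
        'alerts_webhook': 'https://hooks.example.com/alerts',    # made-up URL
    }))
    print(form.validate())  # False if either webhook field is left empty
    print(form.errors)      # e.g. {'reports_webhook': ['This field is required.']}

wtforms wraps webob-style MultiDicts automatically, which is why passing request.POST to a form works in Pyramid views.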
@@ -885,97 +809,88 @@ class IntegrationJiraForm(ReactorForm):
def get_deletion_form(resource):
    class F(ReactorForm):
        application_name = wtforms.StringField(
            'Application Name',
            filters=[strip_filter],
            validators=[wtforms.validators.AnyOf([resource.resource_name])])
        resource_id = wtforms.HiddenField(default=resource.resource_id)
        submit = wtforms.SubmitField(_('Delete my application'))
        ignore_labels = ['submit']
        css_classes = {'submit': 'btn btn-danger'}

    return F


class ChangeApplicationOwnerForm(ReactorForm):
    password = wtforms.PasswordField(
        'Password',
        filters=[strip_filter],
        validators=[old_password_validator,
                    wtforms.validators.DataRequired()])

    user_name = wtforms.StringField(
        'New owners username',
        filters=[strip_filter],
        validators=[found_username_validator,
                    wtforms.validators.DataRequired()])
    submit = wtforms.SubmitField(_('Transfer ownership of application'))
    ignore_labels = ['submit']
    css_classes = {'submit': 'btn btn-danger'}


def default_filename():
    return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m')


class FileUploadForm(ReactorForm):
    title = wtforms.StringField('File Title',
                                default=default_filename,
                                validators=[wtforms.validators.DataRequired()])
    file = wtforms.FileField('File')

    def validate_file(self, field):
        if not hasattr(field.data, 'file'):
            raise wtforms.ValidationError('File is missing')

    submit = wtforms.SubmitField(_('Upload'))


def get_partition_deletion_form(es_indices, pg_indices):
    class F(ReactorForm):
        es_index = wtforms.SelectMultipleField('Elasticsearch',
                                               choices=[(ix, '') for ix in
                                                        es_indices])
        pg_index = wtforms.SelectMultipleField('pg',
                                               choices=[(ix, '') for ix in
                                                        pg_indices])
        confirm = wtforms.TextField('Confirm',
                                    filters=[uppercase_filter, strip_filter],
                                    validators=[
                                        wtforms.validators.AnyOf(['CONFIRM']),
                                        wtforms.validators.DataRequired()])
        ignore_labels = ['submit']
        css_classes = {'submit': 'btn btn-danger'}

    return F


class GroupCreateForm(ReactorForm):
    group_name = wtforms.StringField(
        _('Group Name'),
        filters=[strip_filter],
        validators=[
            wtforms.validators.Length(min=2, max=50),
            unique_groupname_validator,
            wtforms.validators.DataRequired()
        ])
    description = wtforms.StringField(_('Group description'))


time_choices = [(k, v['label'],) for k, v in h.time_deltas.items()]


class AuthTokenCreateForm(ReactorForm):
    description = wtforms.StringField(_('Token description'))
    expires = wtforms.SelectField('Expires',
                                  coerce=lambda x: x,
                                  choices=time_choices,
                                  validators=[wtforms.validators.Optional()])
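The factory pattern above bakes the target resource into the generated class: the AnyOf validator accepts only that resource's exact name, which forces the user to retype it before deletion. Illustrative usage (the resource object is a stand-in, and the MultiDict pattern from the earlier sketch is reused):

    F = get_deletion_form(resource)
    form = F(MultiDict({'application_name': resource.resource_name}))
    print(form.validate())  # True only when the typed name matches exactly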
@@ -24,20 +24,20 @@ from appenlight_client.exceptions import get_current_traceback


def generate_random_string(chars=10):
    return ''.join(random.sample(string.ascii_letters * 2 + string.digits,
                                 chars))


def to_integer_safe(input):
    try:
        return int(input)
    except (TypeError, ValueError,):
        return None


def print_traceback(log):
    traceback = get_current_traceback(skip=1, show_hidden_frames=True,
                                      ignore_system_exceptions=True)
    exception_text = traceback.exception
    log.error(exception_text)
    log.error(traceback.plaintext)
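Quick behavior check of the helpers above (outputs shown as comments):

    print(generate_random_string(8))  # e.g. 'kXmQpLdz' -- random letters/digits
    print(to_integer_safe('42'))      # 42
    print(to_integer_safe('4.2'))     # None -- int('4.2') raises ValueError
    print(to_integer_safe(None))      # None -- the TypeError is swallowed too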
@@ -45,5 +45,6 @@ def print_traceback(log):


def get_callable(import_string):
    import_module, indexer_callable = import_string.split(':')
    return getattr(importlib.import_module(import_module),
                   indexer_callable)
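The import string format is 'dotted.module:attribute', so for example:

    joiner = get_callable('os.path:join')
    print(joiner('tmp', 'file.txt'))  # tmp/file.txt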
@@ -19,6 +19,7 @@ import logging

from pyramid.httpexceptions import HTTPForbidden, HTTPTooManyRequests

+from appenlight.models import Datastores
from appenlight.models.services.config import ConfigService
from appenlight.lib.redis_keys import REDIS_KEYS

@@ -27,18 +28,21 @@ log = logging.getLogger(__name__)

def rate_limiting(request, resource, section, to_increment=1):
    tsample = datetime.datetime.utcnow().replace(second=0, microsecond=0)
    key = REDIS_KEYS['rate_limits'][section].format(tsample,
                                                    resource.resource_id)
    redis_pipeline = request.registry.redis_conn.pipeline()
    redis_pipeline.incr(key, to_increment)
    redis_pipeline.expire(key, 3600 * 24)
    results = redis_pipeline.execute()
    current_count = results[0]
    config = ConfigService.by_key_and_section(section, 'global')
    limit = config.value if config else 1000
    if current_count > int(limit):
        log.info('RATE LIMITING: {}: {}, {}'.format(
            section, resource, current_count))
        abort_msg = 'Rate limits are in effect for this application'
        raise HTTPTooManyRequests(abort_msg,
                                  headers={'X-AppEnlight': abort_msg})

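This is a fixed-window limiter: one Redis counter per section/resource per minute, incremented atomically in a pipeline and expired after a day so stale windows clean themselves up. A standalone sketch of the same pattern (assumes a local Redis and the redis-py package; the key name is illustrative, not AppEnlight's):

    import datetime
    import redis

    def over_limit(conn, resource_id, limit=1000):
        window = datetime.datetime.utcnow().replace(second=0, microsecond=0)
        key = 'demo:rate_limits:{}:{}'.format(window.isoformat(), resource_id)
        pipe = conn.pipeline()
        pipe.incr(key, 1)            # count this event in the current window
        pipe.expire(key, 3600 * 24)  # let old windows decay
        return pipe.execute()[0] > limit

    if over_limit(redis.StrictRedis(), resource_id=42, limit=5):
        print('would raise HTTPTooManyRequests here')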
def check_cors(request, application, should_return=True):
@@ -47,34 +51,31 @@ def check_cors(request, application, should_return=True):
    application, otherwise return 403
    """
    origin_found = False
    origin = request.headers.get('Origin')
    if should_return:
        log.info('CORS for %s' % origin)
    if not origin:
        return False
    for domain in application.domains.split('\n'):
        if domain in origin:
            origin_found = True
    if origin_found:
        request.response.headers.add('Access-Control-Allow-Origin', origin)
        request.response.headers.add('XDomainRequestAllowed', '1')
        request.response.headers.add('Access-Control-Allow-Methods',
                                     'GET, POST, OPTIONS')
        request.response.headers.add('Access-Control-Allow-Headers',
                                     'Accept-Encoding, Accept-Language, '
                                     'Content-Type, '
                                     'Depth, User-Agent, X-File-Size, '
                                     'X-Requested-With, If-Modified-Since, '
                                     'X-File-Name, '
                                     'Cache-Control, Host, Pragma, Accept, '
                                     'Origin, Connection, '
                                     'Referer, Cookie, '
                                     'X-appenlight-public-api-key, '
                                     'x-appenlight-public-api-key')
        request.response.headers.add('Access-Control-Max-Age', '86400')
        return request.response
    else:
        return HTTPForbidden()
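Note that the origin check is a substring test (`domain in origin`), not an exact match; a sketch of the consequence, using stand-in objects:

    class App(object):
        domains = 'example.com\nstaging.example.com'

    def origin_allowed(app, origin):
        return any(d in origin for d in app.domains.split('\n'))

    print(origin_allowed(App(), 'https://example.com'))       # True
    print(origin_allowed(App(), 'https://evil-example.com'))  # True -- substring match
    print(origin_allowed(App(), 'https://other.org'))         # False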
@@ -18,8 +18,7 @@ import copy
import hashlib
import inspect

-from dogpile.cache import make_region
-from dogpile.cache.util import compat
+from dogpile.cache import make_region, compat

regions = None

@@ -42,27 +41,23 @@ def hashgen(namespace, fn, to_str=compat.string_type):
    """

    if namespace is None:
        namespace = '%s:%s' % (fn.__module__, fn.__name__)
    else:
        namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)

    args = inspect.getargspec(fn)
    has_self = args[0] and args[0][0] in ('self', 'cls')

    def generate_key(*args, **kw):
        if kw:
            raise ValueError(
                "dogpile.cache's default key creation "
                "function does not accept keyword arguments.")
        if has_self:
            args = args[1:]

        return namespace + "|" + hashlib.sha1(
            " ".join(map(to_str, args)).encode('utf8')).hexdigest()

    return generate_key

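The generated key is 'module:function[|namespace]' plus a SHA-1 over the stringified positional arguments. A self-contained sketch of the same scheme, no dogpile required:

    import hashlib

    def make_key(namespace, *args):
        digest = hashlib.sha1(' '.join(map(str, args)).encode('utf8')).hexdigest()
        return namespace + '|' + digest

    print(make_key('reports:fetch', 42, 'error'))
    # reports:fetch|<40 hex chars>, stable across calls with the same args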
@@ -72,97 +67,116 @@ class CacheRegions(object):
        config_redis = {"arguments": settings}

        self.redis_min_1 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=60,
            **copy.deepcopy(config_redis))
        self.redis_min_5 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=300,
            **copy.deepcopy(config_redis))

        self.redis_min_10 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=60,
            **copy.deepcopy(config_redis))

        self.redis_min_60 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=3600,
            **copy.deepcopy(config_redis))

        self.redis_sec_1 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=1,
            **copy.deepcopy(config_redis))

        self.redis_sec_5 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=5,
            **copy.deepcopy(config_redis))

        self.redis_sec_30 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=30,
            **copy.deepcopy(config_redis))

        self.redis_day_1 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=86400,
            **copy.deepcopy(config_redis))

        self.redis_day_7 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=86400 * 7,
            **copy.deepcopy(config_redis))

        self.redis_day_30 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.redis",
            expiration_time=86400 * 30,
            **copy.deepcopy(config_redis))

        self.memory_day_1 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=86400,
            **copy.deepcopy(config_redis))

        self.memory_sec_1 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=1)

        self.memory_sec_5 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=5)

        self.memory_min_1 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=60)

        self.memory_min_5 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=300)

        self.memory_min_10 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=600)

        self.memory_min_60 = make_region(
            function_key_generator=hashgen,
            key_mangler=key_mangler).configure(
            "dogpile.cache.memory",
            expiration_time=3600)

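Each attribute is a standard dogpile.cache region, so callers decorate functions and get transparent memoization with the hashgen key scheme above. A minimal usage sketch with the in-memory backend (names are illustrative):

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory', expiration_time=60)

    @region.cache_on_arguments()
    def expensive_lookup(resource_id):
        print('computing for', resource_id)
        return resource_id * 2

    expensive_lookup(21)  # computes and caches
    expensive_lookup(21)  # served from the cache for the next 60 seconds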
def get_region(region):
@@ -5,7 +5,6 @@ from pyramid.config import Configurator

log = logging.getLogger(__name__)

class InspectProxy(object):
    """
    Proxy to the `inspect` module that allows us to use the pyramid include
@@ -18,7 +17,7 @@ class InspectProxy(object):
        """
        if inspect.ismethod(cyfunction):
            cyfunction = cyfunction.im_func
        return getattr(cyfunction, 'func_code')

    def getmodule(self, *args, **kwds):
        """
@@ -41,14 +40,14 @@ class InspectProxy(object):
        """
        # Check if it's called to look up the source file that contains the
        # magic pyramid `includeme` callable.
        if getattr(obj, '__name__') == 'includeme':
            try:
                return inspect.getfile(obj)
            except TypeError as e:
                # Cython functions are not recognized as functions by the
                # inspect module. We have to unpack the func_code attribute
                # ourselves.
                if 'cyfunction' in e.message:
                    obj = self._get_cyfunction_func_code(obj)
                    return inspect.getfile(obj)
                raise
@@ -61,32 +60,33 @@ class CythonCompatConfigurator(Configurator):
    Customized configurator to replace the inspect class attribute with
    a custom one that is cython compatible.
    """
    inspect = InspectProxy()


def register_appenlight_plugin(config, plugin_name, plugin_config):
    def register():
        log.warning('Registering plugin: {}'.format(plugin_name))
        if plugin_name not in config.registry.appenlight_plugins:
            config.registry.appenlight_plugins[plugin_name] = {
                'javascript': None,
                'static': None,
                'css': None,
                'celery_tasks': None,
                'celery_beats': None,
                'fulltext_indexer': None,
                'sqlalchemy_migrations': None,
                'default_values_setter': None,
                'header_html': None,
                'resource_types': [],
                'url_gen': None
            }
        config.registry.appenlight_plugins[plugin_name].update(
            plugin_config)
        # inform AE what kind of resource types we have available
        # so we can avoid failing when a plugin is removed but data
        # is still present in the db
        if plugin_config.get('resource_types'):
            config.registry.resource_types.extend(
                plugin_config['resource_types'])

    config.action('appenlight_plugin={}'.format(plugin_name), register)
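Because registration goes through config.action, the registry merge happens during Pyramid's configuration commit rather than at import time. A hypothetical call (plugin name and values are made up; the keys mirror the defaults above):

    register_appenlight_plugin(config, 'appenlight_jira_plugin', {
        'celery_tasks': 'appenlight_jira_plugin.tasks',
        'resource_types': ['jira_board'],
    })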
@@ -23,20 +23,20 @@ ENCRYPTION_SECRET = None
def encrypt_fernet(value):
    # avoid double encryption
    # not sure if this is needed but it won't hurt too much to have this
    if value.startswith('enc$fernet$'):
        return value
    f = Fernet(ENCRYPTION_SECRET)
    return 'enc$fernet${}'.format(f.encrypt(value.encode('utf8')).decode('utf8'))


def decrypt_fernet(value):
    parts = value.split('$', 3)
    if not len(parts) == 3:
        # not encrypted values
        return value
    else:
        f = Fernet(ENCRYPTION_SECRET)
        decrypted_data = f.decrypt(parts[2].encode('utf8')).decode('utf8')
        return decrypted_data

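Round-trip sketch using the cryptography package's Fernet directly; the enc$fernet$ prefix is how stored values are recognized as already encrypted:

    from cryptography.fernet import Fernet

    secret = Fernet.generate_key()  # stand-in for ENCRYPTION_SECRET
    f = Fernet(secret)

    token = 'enc$fernet${}'.format(f.encrypt(b'hunter2').decode('utf8'))
    parts = token.split('$', 3)     # ['enc', 'fernet', '<fernet token>']
    print(f.decrypt(parts[2].encode('utf8')).decode('utf8'))  # hunter2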
@@ -1,5 +1,4 @@
import collections
# -*- coding: utf-8 -*-

# Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors
@@ -21,14 +20,13 @@ class StupidEnum(object):
    @classmethod
    def set_inverse(cls):
        cls._inverse_values = dict(
            (y, x) for x, y in vars(cls).items() if
            not x.startswith('_') and not callable(y)
        )

    @classmethod
    def key_from_value(cls, value):
        if not hasattr(cls, '_inverse_values'):
            cls.set_inverse()
        return cls._inverse_values.get(value)

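A tiny illustration of the reverse lookup, assuming a subclass defined in the usual constants style:

    class ReportType(StupidEnum):
        ERROR = 1
        SLOW = 3

    print(ReportType.key_from_value(3))   # 'SLOW'
    print(ReportType.key_from_value(99))  # None -- unknown values map to None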
@@ -25,7 +25,7 @@ import functools
import decimal
import imp

__all__ = ['json', 'simplejson', 'stdlibjson']


def _is_aware(value):
@@ -35,7 +35,8 @@ def _is_aware(value):
    The logic is described in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """
    return (value.tzinfo is not None
            and value.tzinfo.utcoffset(value) is not None)


def _obj_dump(obj):
@@ -54,8 +55,8 @@ def _obj_dump(obj):
        r = obj.isoformat()
        # if obj.microsecond:
        #     r = r[:23] + r[26:]
        if r.endswith('+00:00'):
            r = r[:-6] + 'Z'
        return r
    elif isinstance(obj, datetime.date):
        return obj.isoformat()
@@ -70,7 +71,7 @@ def _obj_dump(obj):
        return r
    elif isinstance(obj, set):
        return list(obj)
    elif hasattr(obj, '__json__'):
        if callable(obj.__json__):
            return obj.__json__()
        else:
@@ -82,7 +83,8 @@ def _obj_dump(obj):
# Import simplejson
try:
    # import simplejson initially
    _sj = imp.load_module('_sj', *imp.find_module('simplejson'))


    def extended_encode(obj):
        try:
@@ -91,21 +93,22 @@ try:
            pass
        raise TypeError("%r is not JSON serializable" % (obj,))


    # we handle decimals ourselves so json and simplejson behave uniformly
    sj_version = [int(x) for x in _sj.__version__.split('.')]
    major, minor = sj_version[0], sj_version[1]
    if major < 2 or (major == 2 and minor < 1):
        # simplejson < 2.1 doesn't support use_decimal
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode)
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode)
    else:
        _sj.dumps = functools.partial(
            _sj.dumps, default=extended_encode, use_decimal=False)
        _sj.dump = functools.partial(
            _sj.dump, default=extended_encode, use_decimal=False)
    simplejson = _sj

except ImportError:
@@ -114,7 +117,8 @@ except ImportError:

try:
    # simplejson not found, fall back to the regular json module
    _json = imp.load_module('_json', *imp.find_module('json'))


    # extended JSON encoder for json
    class ExtendedEncoder(_json.JSONEncoder):
@@ -125,6 +129,7 @@ try:
                pass
            raise TypeError("%r is not JSON serializable" % (obj,))


    # monkey-patch JSON encoder to use extended version
    _json.dumps = functools.partial(_json.dumps, cls=ExtendedEncoder)
    _json.dump = functools.partial(_json.dump, cls=ExtendedEncoder)
@@ -140,4 +145,4 @@
elif _json:
    json = _json
else:
    raise ImportError('Could not find any json modules')
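Whichever backend wins, the module-level json alias ends up serializing datetimes, dates, Decimals and objects exposing __json__. A standalone sketch of the encoder hook with the stdlib json module (the exact conversions live in _obj_dump above; ISO dates and stringified Decimals are assumed here):

    import datetime
    import decimal
    import functools
    import json

    def extended_encode(obj):
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return str(obj)
        raise TypeError('%r is not JSON serializable' % (obj,))

    dumps = functools.partial(json.dumps, default=extended_encode)
    print(dumps({'when': datetime.date(2016, 6, 29),
                 'cost': decimal.Decimal('9.99')}))
    # {"when": "2016-06-29", "cost": "9.99"}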
@@ -26,135 +26,94 @@ _ = lambda x: x

time_deltas = OrderedDict()

time_deltas['1m'] = {'delta': datetime.timedelta(minutes=1),
                     'label': '1 minute', 'minutes': 1}

time_deltas['5m'] = {'delta': datetime.timedelta(minutes=5),
                     'label': '5 minutes', 'minutes': 5}
time_deltas['30m'] = {'delta': datetime.timedelta(minutes=30),
                      'label': '30 minutes', 'minutes': 30}
time_deltas['1h'] = {'delta': datetime.timedelta(hours=1),
                     'label': '60 minutes', 'minutes': 60}
time_deltas['4h'] = {'delta': datetime.timedelta(hours=4), 'label': '4 hours',
                     'minutes': 60 * 4}
time_deltas['12h'] = {'delta': datetime.timedelta(hours=12),
                      'label': '12 hours', 'minutes': 60 * 12}
time_deltas['24h'] = {'delta': datetime.timedelta(hours=24),
                      'label': '24 hours', 'minutes': 60 * 24}
time_deltas['3d'] = {'delta': datetime.timedelta(days=3), 'label': '3 days',
                     'minutes': 60 * 24 * 3}
time_deltas['1w'] = {'delta': datetime.timedelta(days=7), 'label': '7 days',
                     'minutes': 60 * 24 * 7}
time_deltas['2w'] = {'delta': datetime.timedelta(days=14), 'label': '14 days',
                     'minutes': 60 * 24 * 14}
time_deltas['1M'] = {'delta': datetime.timedelta(days=31), 'label': '31 days',
                     'minutes': 60 * 24 * 31}
time_deltas['3M'] = {'delta': datetime.timedelta(days=31 * 3),
                     'label': '3 months',
                     'minutes': 60 * 24 * 31 * 3}
time_deltas['6M'] = {'delta': datetime.timedelta(days=31 * 6),
                     'label': '6 months',
                     'minutes': 60 * 24 * 31 * 6}
time_deltas['12M'] = {'delta': datetime.timedelta(days=31 * 12),
                      'label': '12 months',
                      'minutes': 60 * 24 * 31 * 12}

# used in json representation
time_options = dict([(k, {'label': v['label'], 'minutes': v['minutes']})
                     for k, v in time_deltas.items()])
FlashMsg = namedtuple('FlashMsg', ['msg', 'level'])


def get_flash(request):
    messages = []
    messages.extend(
        [FlashMsg(msg, 'error')
         for msg in request.session.peek_flash('error')])
    messages.extend([FlashMsg(msg, 'warning')
                     for msg in request.session.peek_flash('warning')])
    messages.extend(
        [FlashMsg(msg, 'notice') for msg in request.session.peek_flash()])
    return messages


def clear_flash(request):
    request.session.pop_flash('error')
    request.session.pop_flash('warning')
    request.session.pop_flash()


def get_type_formatted_flash(request):
    return [{'msg': message.msg, 'type': message.level}
            for message in get_flash(request)]


def gen_pagination_headers(request, paginator):
    headers = {
        'x-total-count': str(paginator.item_count),
        'x-current-page': str(paginator.page),
        'x-items-per-page': str(paginator.items_per_page)
    }
    params_dict = request.GET.dict_of_lists()
    last_page_params = copy.deepcopy(params_dict)
    last_page_params['page'] = paginator.last_page or 1
    first_page_params = copy.deepcopy(params_dict)
    first_page_params.pop('page', None)
    next_page_params = copy.deepcopy(params_dict)
    next_page_params['page'] = paginator.next_page or paginator.last_page or 1
    prev_page_params = copy.deepcopy(params_dict)
    prev_page_params['page'] = paginator.previous_page or 1
    lp_url = request.current_route_url(_query=last_page_params)
    fp_url = request.current_route_url(_query=first_page_params)
    links = [
        'rel="last", <{}>'.format(lp_url),
        'rel="first", <{}>'.format(fp_url),
    ]
    if first_page_params != prev_page_params:
        prev_url = request.current_route_url(_query=prev_page_params)
        links.append('rel="prev", <{}>'.format(prev_url))
    if last_page_params != next_page_params:
        next_url = request.current_route_url(_query=next_page_params)
        links.append('rel="next", <{}>'.format(next_url))
    headers['link'] = '; '.join(links)
    return headers
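Illustrative output for page 2 of 5 (URLs are made up):

    x-total-count: 100
    x-current-page: 2
    x-items-per-page: 20
    link: rel="last", <https://api.example.com/logs?page=5>; rel="first", <https://api.example.com/logs>; rel="prev", <https://api.example.com/logs?page=1>; rel="next", <https://api.example.com/logs?page=3>

Note that each link part is emitted rel-first; clients expecting the conventional RFC 5988 order (URL first, then rel) need to account for that.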
@@ -18,21 +18,17 @@ import re
from appenlight.lib.ext_json import json
from jinja2 import Markup, escape, evalcontextfilter

_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')


@evalcontextfilter
def nl2br(eval_ctx, value):
    if eval_ctx.autoescape:
        result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br>\n'))
                             for p in _paragraph_re.split(escape(value)))
    else:
        result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n')
                             for p in _paragraph_re.split(escape(value)))
    if eval_ctx.autoescape:
        result = Markup(result)
    return result
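Blank-line-separated runs become paragraphs and single newlines become <br> tags, for example:

    nl2br(ctx, 'hello\nworld\n\nbye')
    # <p>hello<br>
    # world</p>
    #
    # <p>bye</p>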
@@ -40,14 +36,11 @@ def nl2br(eval_ctx, value):

@evalcontextfilter
def toJSONUnsafe(eval_ctx, value):
    encoded = json.dumps(value).replace('&', '\\u0026') \
        .replace('<', '\\u003c') \
        .replace('>', '\\u003e') \
        .replace('"', '\\u0022') \
        .replace("'", '\\u0027') \
        .replace(r'\n', '/\\\n')
    return Markup("'%s'" % encoded)
@@ -17,30 +17,11 @@
import json
import logging

ignored_keys = ['args', 'asctime', 'created', 'exc_info', 'exc_text',
                'filename', 'funcName', 'levelname', 'levelno', 'lineno',
                'message', 'module', 'msecs', 'msg', 'name', 'pathname',
                'process', 'processName', 'relativeCreated', 'stack_info',
                'thread', 'threadName']


class JSONFormatter(logging.Formatter):
@@ -60,7 +41,7 @@ class JSONFormatter(logging.Formatter):
        record.message = record.getMessage()
        log_dict = vars(record)
        keys = [k for k in log_dict.keys() if k not in ignored_keys]
        payload = {'message': record.message}
        payload.update({k: log_dict[k] for k in keys})
        record.message = json.dumps(payload, default=lambda x: str(x))

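Wiring sketch for the formatter, assuming its format() ultimately emits record.message (the rest of the method is outside this hunk). Any extra record attributes survive into the JSON payload, because only the stdlib bookkeeping keys are filtered out:

    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(JSONFormatter('%(message)s'))
    log = logging.getLogger('demo')
    log.addHandler(handler)

    log.warning('cache miss', extra={'region': 'redis_min_1'})
    # -> {"message": "cache miss", "region": "redis_min_1"}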
@@ -14,56 +14,52 @@
# See the License for the specific language governing permissions and
# limitations under the License.

BASE = 'appenlight:data:{}'

REDIS_KEYS = {
    'tasks': {
        'add_reports_lock': BASE.format('add_reports_lock:{}'),
        'add_logs_lock': BASE.format('add_logs_lock:{}'),
    },
    'counters': {
        'events_per_minute_per_user': BASE.format(
            'events_per_minute_per_user:{}:{}'),
        'reports_per_minute': BASE.format('reports_per_minute:{}'),
        'reports_per_hour_per_app': BASE.format(
            'reports_per_hour_per_app:{}:{}'),
        'reports_per_type': BASE.format('reports_per_type:{}'),
        'logs_per_minute': BASE.format('logs_per_minute:{}'),
        'logs_per_hour_per_app': BASE.format(
            'logs_per_hour_per_app:{}:{}'),
        'metrics_per_minute': BASE.format('metrics_per_minute:{}'),
        'metrics_per_hour_per_app': BASE.format(
            'metrics_per_hour_per_app:{}:{}'),
        'report_group_occurences': BASE.format('report_group_occurences:{}'),
        'report_group_occurences_alerting': BASE.format(
            'report_group_occurences_alerting:{}'),
        'report_group_occurences_10th': BASE.format(
            'report_group_occurences_10th:{}'),
        'report_group_occurences_100th': BASE.format(
            'report_group_occurences_100th:{}'),
    },
    'rate_limits': {
        'per_application_reports_rate_limit': BASE.format(
            'per_application_reports_limit:{}:{}'),
        'per_application_logs_rate_limit': BASE.format(
            'per_application_logs_rate_limit:{}:{}'),
        'per_application_metrics_rate_limit': BASE.format(
            'per_application_metrics_rate_limit:{}:{}'),
    },
    'apps_that_got_new_data_per_hour': BASE.format('apps_that_got_new_data_per_hour:{}'),
    'apps_that_had_reports': BASE.format('apps_that_had_reports'),
    'apps_that_had_error_reports': BASE.format('apps_that_had_error_reports'),
    'apps_that_had_reports_alerting': BASE.format(
        'apps_that_had_reports_alerting'),
    'apps_that_had_error_reports_alerting': BASE.format(
        'apps_that_had_error_reports_alerting'),
    'reports_to_notify_per_type_per_app': BASE.format(
        'reports_to_notify_per_type_per_app:{}:{}'),
    'reports_to_notify_per_type_per_app_alerting': BASE.format(
        'reports_to_notify_per_type_per_app_alerting:{}:{}'),
    'seen_tag_list': BASE.format('seen_tag_list')
}
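BASE.format only fills the outer placeholder, so the inner {} pairs survive for later format calls; this is exactly how rate_limiting builds its key:

    tmpl = REDIS_KEYS['rate_limits']['per_application_reports_rate_limit']
    # 'appenlight:data:per_application_reports_limit:{}:{}'
    print(tmpl.format('2018-10-13T19:27:00', 42))
    # appenlight:data:per_application_reports_limit:2018-10-13T19:27:00:42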
@@ -22,7 +22,7 @@ import appenlight.lib.helpers as helpers

from authomatic.providers import oauth2, oauth1
from authomatic import Authomatic
-from ziggurat_foundations.models.services.user import UserService
+from appenlight.models.user import User


class CSRFException(Exception):
54 try:
54 try:
55 return request.json_body
55 return request.json_body
56 except ValueError:
56 except ValueError:
57 raise JSONException("Incorrect JSON")
57 raise JSONException('Incorrect JSON')
58
58
59
59
60 def get_user(request):
60 def get_user(request):
61 if not request.path_info.startswith("/static"):
61 if not request.path_info.startswith('/static'):
62 user_id = unauthenticated_userid(request)
62 user_id = unauthenticated_userid(request)
63 try:
63 try:
64 user_id = int(user_id)
64 user_id = int(user_id)
@@ -66,12 +66,10 @@ def get_user(request):
            return None

        if user_id:
-            user = UserService.by_id(user_id)
+            user = User.by_id(user_id)
            if user:
                request.environ['appenlight.username'] = '%d:%s' % (
                    user_id, user.user_name)
                return user
            else:
                return None
@@ -87,7 +85,7 @@ def add_flash_to_headers(request, clear=True):
    flash queue
    """
    flash_msgs = helpers.get_type_formatted_flash(request)
    request.response.headers['x-flash-messages'] = json.dumps(flash_msgs)
    helpers.clear_flash(request)

@@ -96,36 +94,42 @@ def get_authomatic(request):
    # authomatic social auth
    authomatic_conf = {
        # callback http://yourapp.com/social_auth/twitter
        'twitter': {
            'class_': oauth1.Twitter,
            'consumer_key': settings.get('authomatic.pr.twitter.key', ''),
            'consumer_secret': settings.get('authomatic.pr.twitter.secret',
                                            ''),
        },
        # callback http://yourapp.com/social_auth/facebook
        'facebook': {
            'class_': oauth2.Facebook,
            'consumer_key': settings.get('authomatic.pr.facebook.app_id', ''),
            'consumer_secret': settings.get('authomatic.pr.facebook.secret',
                                            ''),
            'scope': ['email'],
        },
        # callback http://yourapp.com/social_auth/google
        'google': {
            'class_': oauth2.Google,
            'consumer_key': settings.get('authomatic.pr.google.key', ''),
            'consumer_secret': settings.get(
                'authomatic.pr.google.secret', ''),
            'scope': ['profile', 'email'],
        },
        'github': {
            'class_': oauth2.GitHub,
            'consumer_key': settings.get('authomatic.pr.github.key', ''),
            'consumer_secret': settings.get(
                'authomatic.pr.github.secret', ''),
            'scope': ['repo', 'public_repo', 'user:email'],
            'access_headers': {'User-Agent': 'AppEnlight'},
        },
        'bitbucket': {
            'class_': oauth1.Bitbucket,
            'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''),
            'consumer_secret': settings.get(
                'authomatic.pr.bitbucket.secret', '')
        }
    }
    return Authomatic(
        config=authomatic_conf, secret=settings['authomatic.secret'])
@@ -52,13 +52,13 @@ class RuleBase(object):
        :param field_name:
        :return:
        """
        parts = field_name.split(':') if field_name else []
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = found.get(current_key)
            if not found and parts:
                raise KeyNotFoundException('Key not found in structure')
        return found

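Field names are colon-separated paths into nested dicts, so for example:

    struct = {'http_status': 500, 'tags': {'server_name': 'web01'}}
    print(RuleBase.default_dict_struct_getter(struct, 'tags:server_name'))
    # web01 -- an intermediate miss with parts remaining raises KeyNotFoundException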
    @classmethod
@@ -72,13 +72,13 @@ class RuleBase(object):
        :param field_name:
        :return:
        """
        parts = field_name.split(':')
        found = struct
        while parts:
            current_key = parts.pop(0)
            found = getattr(found, current_key, None)
            if not found and parts:
                raise KeyNotFoundException('Key not found in structure')
        return found

    def normalized_type(self, field, value):
@@ -89,32 +89,28 @@ class RuleBase(object):
        """
        f_type = self.type_matrix.get(field)
        if f_type:
            cast_to = f_type['type']
        else:
            raise UnknownTypeException('Unknown type')

        if value is None:
            return None

        try:
            if cast_to == 'int':
                return int(value)
            elif cast_to == 'float':
                return float(value)
            elif cast_to == 'unicode':
                return str(value)
        except ValueError as exc:
            raise InvalidValueException(exc)


class Rule(RuleBase):
    def __init__(self, config, type_matrix,
                 struct_getter=RuleBase.default_dict_struct_getter,
                 config_manipulator=None):
        """

        :param config: dict - contains rule configuration
@@ -163,9 +159,8 @@ class Rule(RuleBase):
            config_manipulator(self)

    def subrule_check(self, rule_config, struct):
        rule = Rule(rule_config, self.type_matrix,
                    config_manipulator=self.config_manipulator)
        return rule.match(struct)

    def match(self, struct):
@@ -174,41 +169,32 @@ class Rule(RuleBase):
        First tries report value, then tests tags if not found, then finally
        report group
        """
        field_name = self.config.get('field')
        test_value = self.config.get('value')

        if not field_name:
            return False

        if field_name == '__AND__':
            rule = AND(self.config['rules'], self.type_matrix,
                       config_manipulator=self.config_manipulator)
            return rule.match(struct)
        elif field_name == '__OR__':
            rule = OR(self.config['rules'], self.type_matrix,
                      config_manipulator=self.config_manipulator)
            return rule.match(struct)
        elif field_name == '__NOT__':
            rule = NOT(self.config['rules'], self.type_matrix,
                       config_manipulator=self.config_manipulator)
            return rule.match(struct)

        if test_value is None:
            return False

        try:
            struct_value = self.normalized_type(field_name,
                                                self.struct_getter(struct,
                                                                   field_name))
        except (UnknownTypeException, InvalidValueException) as exc:
            log.error(str(exc))
            return False
@@ -219,23 +205,24 @@ class Rule(RuleBase):
            log.error(str(exc))
            return False

        if self.config['op'] not in ('startswith', 'endswith', 'contains'):
            try:
                return getattr(operator,
                               self.config['op'])(struct_value, test_value)
            except TypeError:
                return False
        elif self.config['op'] == 'startswith':
            return struct_value.startswith(test_value)
        elif self.config['op'] == 'endswith':
            return struct_value.endswith(test_value)
        elif self.config['op'] == 'contains':
            return test_value in struct_value
        raise BadConfigException('Invalid configuration, '
                                 'unknown operator: {}'.format(self.config))

    def __repr__(self):
        return '<Rule {} {}>'.format(self.config.get('field'),
                                     self.config.get('value'))


class AND(Rule):
@@ -244,7 +231,8 b' class AND(Rule):'
244 self.rules = rules
231 self.rules = rules
245
232
246 def match(self, struct):
233 def match(self, struct):
247 return all([self.subrule_check(r_conf, struct) for r_conf in self.rules])
234 return all([self.subrule_check(r_conf, struct) for r_conf
235 in self.rules])
248
236
249
237
250 class NOT(Rule):
238 class NOT(Rule):
@@ -253,7 +241,8 b' class NOT(Rule):'
253 self.rules = rules
241 self.rules = rules
254
242
255 def match(self, struct):
243 def match(self, struct):
256 return all([not self.subrule_check(r_conf, struct) for r_conf in self.rules])
244 return all([not self.subrule_check(r_conf, struct) for r_conf
245 in self.rules])
257
246
258
247
259 class OR(Rule):
248 class OR(Rule):
@@ -262,12 +251,14 b' class OR(Rule):'
262 self.rules = rules
251 self.rules = rules
263
252
264 def match(self, struct):
253 def match(self, struct):
265 return any([self.subrule_check(r_conf, struct) for r_conf in self.rules])
254 return any([self.subrule_check(r_conf, struct) for r_conf
255 in self.rules])
266
256
267
257
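Note: a minimal sketch of how Rule composes with the __AND__/__OR__/__NOT__ virtual fields above. The field names are hypothetical, and the type_matrix shape ({field: {"type": ...}}) is inferred from the label_rewriter_func assignment in RuleService below; only the "float" and "unicode" casts are confirmed by the code shown in this diff.

    # hypothetical fields; matrix shape inferred from
    # rule.type_matrix[to_replace] = {"type": "float"} in RuleService
    type_matrix = {
        "occurences": {"type": "float"},
        "error": {"type": "unicode"},
    }
    config = {
        "field": "__AND__",
        "rules": [
            {"field": "occurences", "op": "ge", "value": "10"},
            {"field": "error", "op": "contains", "value": "timeout"},
        ],
    }
    rule = Rule(config, type_matrix)
    rule.match({"occurences": 12, "error": "gateway timeout"})  # -> True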
268 class RuleService(object):
258 class RuleService(object):
269 @staticmethod
259 @staticmethod
270 def rule_from_config(config, field_mappings, labels_dict, manipulator_func=None):
260 def rule_from_config(config, field_mappings, labels_dict,
261 manipulator_func=None):
271 """
262 """
272 Returns modified rule with manipulator function
263 Returns modified rule with manipulator function
273 By default manipulator function replaces field id from labels_dict
264 By default manipulator function replaces field id from labels_dict
@@ -279,33 +270,28 b' class RuleService(object):'
279 """
270 """
280 rev_map = {}
271 rev_map = {}
281 for k, v in labels_dict.items():
272 for k, v in labels_dict.items():
282 rev_map[(v["agg"], v["key"])] = k
273 rev_map[(v['agg'], v['key'],)] = k
283
274
284 if manipulator_func is None:
275 if manipulator_func is None:
285
286 def label_rewriter_func(rule):
276 def label_rewriter_func(rule):
287 field = rule.config.get("field")
277 field = rule.config.get('field')
288 if not field or rule.config["field"] in [
278 if not field or rule.config['field'] in ['__OR__',
289 "__OR__",
279 '__AND__', '__NOT__']:
290 "__AND__",
291 "__NOT__",
292 ]:
293 return
280 return
294
281
295 to_map = field_mappings.get(rule.config["field"])
282 to_map = field_mappings.get(rule.config['field'])
296
283
297 # we need to replace series field with _AE_NOT_FOUND_ to not match
284 # we need to replace series field with _AE_NOT_FOUND_ to not match
298 # accidentally some other field which happens to have the series that
285 # accidentally some other field which happens to have the series that
299 # was used when the alert was created
286 # was used when the alert was created
300 if to_map:
287 if to_map:
301 to_replace = rev_map.get(
288 to_replace = rev_map.get((to_map['agg'], to_map['key'],),
302 (to_map["agg"], to_map["key"]), "_AE_NOT_FOUND_"
289 '_AE_NOT_FOUND_')
303 )
304 else:
290 else:
305 to_replace = "_AE_NOT_FOUND_"
291 to_replace = '_AE_NOT_FOUND_'
306
292
307 rule.config["field"] = to_replace
293 rule.config['field'] = to_replace
308 rule.type_matrix[to_replace] = {"type": "float"}
294 rule.type_matrix[to_replace] = {"type": 'float'}
309
295
310 manipulator_func = label_rewriter_func
296 manipulator_func = label_rewriter_func
311
297
@@ -14,9 +14,8 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from ziggurat_foundations.models.services.external_identity import (
17 from ziggurat_foundations.models.services.external_identity import \
18 ExternalIdentityService,
18 ExternalIdentityService
19 )
20 from appenlight.models.external_identity import ExternalIdentity
19 from appenlight.models.external_identity import ExternalIdentity
21
20
22
21
@@ -25,38 +24,37 b' def handle_social_data(request, user, social_data):'
25 update_identity = False
24 update_identity = False
26
25
27 extng_id = ExternalIdentityService.by_external_id_and_provider(
26 extng_id = ExternalIdentityService.by_external_id_and_provider(
28 social_data["user"]["id"], social_data["credentials"].provider_name
27 social_data['user']['id'],
28 social_data['credentials'].provider_name
29 )
29 )
30
30
31 # fix legacy accounts with wrong google ID
31 # fix legacy accounts with wrong google ID
32 if not extng_id and social_data["credentials"].provider_name == "google":
32 if not extng_id and social_data['credentials'].provider_name == 'google':
33 extng_id = ExternalIdentityService.by_external_id_and_provider(
33 extng_id = ExternalIdentityService.by_external_id_and_provider(
34 social_data["user"]["email"], social_data["credentials"].provider_name
34 social_data['user']['email'],
35 social_data['credentials'].provider_name
35 )
36 )
36
37
37 if extng_id:
38 if extng_id:
38 extng_id.delete()
39 extng_id.delete()
39 update_identity = True
40 update_identity = True
40
41
41 if not social_data["user"]["id"]:
42 if not social_data['user']['id']:
42 request.session.flash(
43 request.session.flash(
43 "No external user id found? Perhaps permissions for "
44 'No external user id found? Perhaps permissions for '
44 "authentication are set incorrectly",
45 'authentication are set incorrectly', 'error')
45 "error",
46 )
47 return False
46 return False
48
47
49 if not extng_id or update_identity:
48 if not extng_id or update_identity:
50 if not update_identity:
49 if not update_identity:
51 request.session.flash(
50 request.session.flash('Your external identity is now '
52 "Your external identity is now " "connected with your account"
51 'connected with your account')
53 )
54 ex_identity = ExternalIdentity()
52 ex_identity = ExternalIdentity()
55 ex_identity.external_id = social_data["user"]["id"]
53 ex_identity.external_id = social_data['user']['id']
56 ex_identity.external_user_name = social_data["user"]["user_name"]
54 ex_identity.external_user_name = social_data['user']['user_name']
57 ex_identity.provider_name = social_data["credentials"].provider_name
55 ex_identity.provider_name = social_data['credentials'].provider_name
58 ex_identity.access_token = social_data["credentials"].token
56 ex_identity.access_token = social_data['credentials'].token
59 ex_identity.token_secret = social_data["credentials"].token_secret
57 ex_identity.token_secret = social_data['credentials'].token_secret
60 ex_identity.alt_token = social_data["credentials"].refresh_token
58 ex_identity.alt_token = social_data['credentials'].refresh_token
61 user.external_identities.append(ex_identity)
59 user.external_identities.append(ex_identity)
62 request.session.pop("zigg.social_auth", None)
60 request.session.pop('zigg.social_auth', None)
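For reference, a sketch of the social_data shape this handler expects, reconstructed from the attribute accesses above; the credentials object normally comes from the external auth library, so a namedtuple stands in for it here:

    from collections import namedtuple

    # stand-in for the auth library's credentials object
    Credentials = namedtuple(
        "Credentials", "provider_name token token_secret refresh_token")

    social_data = {
        "user": {"id": "12345", "user_name": "jdoe", "email": "j@example.com"},
        "credentials": Credentials("google", "token", "token-secret", "refresh"),
    }
    # handle_social_data(request, user, social_data) then links the external
    # identity to the given user and flashes a confirmation message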
@@ -28,30 +28,32 b' from collections import namedtuple'
28 from datetime import timedelta, datetime, date
28 from datetime import timedelta, datetime, date
29 from dogpile.cache.api import NO_VALUE
29 from dogpile.cache.api import NO_VALUE
30 from appenlight.models import Datastores
30 from appenlight.models import Datastores
31 from appenlight.validators import LogSearchSchema, TagListSchema, accepted_search_params
31 from appenlight.validators import (LogSearchSchema,
32 TagListSchema,
33 accepted_search_params)
32 from itsdangerous import TimestampSigner
34 from itsdangerous import TimestampSigner
33 from ziggurat_foundations.permissions import ALL_PERMISSIONS
35 from ziggurat_foundations.permissions import ALL_PERMISSIONS
34 from ziggurat_foundations.models.services.user import UserService
35 from dateutil.relativedelta import relativedelta
36 from dateutil.relativedelta import relativedelta
36 from dateutil.rrule import rrule, MONTHLY, DAILY
37 from dateutil.rrule import rrule, MONTHLY, DAILY
37
38
38 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
39
40
40
41
41 Stat = namedtuple("Stat", "start_interval value")
42 Stat = namedtuple('Stat', 'start_interval value')
42
43
43
44
44 def default_extractor(item):
45 def default_extractor(item):
45 """
46 """
46 :param item - item to extract date from
47 :param item - item to extract date from
47 """
48 """
48 if hasattr(item, "start_interval"):
49 if hasattr(item, 'start_interval'):
49 return item.start_interval
50 return item.start_interval
50 return item["start_interval"]
51 return item['start_interval']
51
52
52
53
53 # fast gap generator
54 # fast gap generator
54 def gap_gen_default(start, step, itemiterator, end_time=None, iv_extractor=None):
55 def gap_gen_default(start, step, itemiterator, end_time=None,
56 iv_extractor=None):
55 """ generates a list of time/value items based on step and itemiterator
57 """ generates a list of time/value items based on step and itemiterator
56 if there are entries missing from iterator time/None will be returned
58 if there are entries missing from iterator time/None will be returned
57 instead
59 instead
@@ -97,31 +99,27 b' class DateTimeEncoder(json.JSONEncoder):'
97 return json.JSONEncoder.default(self, obj)
99 return json.JSONEncoder.default(self, obj)
98
100
99
101
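DateTimeEncoder exists so payloads can carry datetime objects straight through json.dumps (channelstream_request below relies on this). A one-line sketch; the exact serialized format depends on the default() override, which is truncated in this hunk:

    import json
    from datetime import datetime

    # would raise TypeError with the stock encoder
    json.dumps({"ts": datetime(2016, 6, 29, 10, 0)}, cls=DateTimeEncoder)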
100 def channelstream_request(
102 def channelstream_request(secret, endpoint, payload, throw_exceptions=False,
101 secret, endpoint, payload, throw_exceptions=False, servers=None
103 servers=None):
102 ):
103 responses = []
104 responses = []
104 if not servers:
105 if not servers:
105 servers = []
106 servers = []
106
107
107 signer = TimestampSigner(secret)
108 signer = TimestampSigner(secret)
108 sig_for_server = signer.sign(endpoint)
109 sig_for_server = signer.sign(endpoint)
109 for secret, server in [(s["secret"], s["server"]) for s in servers]:
110 for secret, server in [(s['secret'], s['server']) for s in servers]:
110 response = {}
111 response = {}
111 secret_headers = {
112 secret_headers = {'x-channelstream-secret': sig_for_server,
112 "x-channelstream-secret": sig_for_server,
113 'x-channelstream-endpoint': endpoint,
113 "x-channelstream-endpoint": endpoint,
114 'Content-Type': 'application/json'}
114 "Content-Type": "application/json",
115 url = '%s%s' % (server, endpoint)
115 }
116 url = "%s%s" % (server, endpoint)
117 try:
116 try:
118 response = requests.post(
117 response = requests.post(url,
119 url,
118 data=json.dumps(payload,
120 data=json.dumps(payload, cls=DateTimeEncoder),
119 cls=DateTimeEncoder),
121 headers=secret_headers,
120 headers=secret_headers,
122 verify=False,
121 verify=False,
123 timeout=2,
122 timeout=2).json()
124 ).json()
125 except requests.exceptions.RequestException as e:
123 except requests.exceptions.RequestException as e:
126 if throw_exceptions:
124 if throw_exceptions:
127 raise
125 raise
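A sketch of a call to this helper. The servers list shape matches the (s['secret'], s['server']) unpacking above; the '/message' endpoint and payload layout are assumptions about the channelstream API, not confirmed by this diff:

    # hypothetical single-node setup
    servers = [{"secret": "server-secret", "server": "http://127.0.0.1:8000"}]
    responses = channelstream_request(
        "server-secret",
        "/message",  # assumed channelstream endpoint
        [{"channel": "alerts", "message": {"text": "new report"}}],
        servers=servers,
    )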
@@ -131,15 +129,13 b' def channelstream_request('
131
129
132 def add_cors_headers(response):
130 def add_cors_headers(response):
133 # allow CORS
131 # allow CORS
134 response.headers.add("Access-Control-Allow-Origin", "*")
132 response.headers.add('Access-Control-Allow-Origin', '*')
135 response.headers.add("XDomainRequestAllowed", "1")
133 response.headers.add('XDomainRequestAllowed', '1')
136 response.headers.add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
134 response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
137 # response.headers.add('Access-Control-Allow-Credentials', 'true')
135 # response.headers.add('Access-Control-Allow-Credentials', 'true')
138 response.headers.add(
136 response.headers.add('Access-Control-Allow-Headers',
139 "Access-Control-Allow-Headers",
137 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie')
140 "Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie",
138 response.headers.add('Access-Control-Max-Age', '86400')
141 )
142 response.headers.add("Access-Control-Max-Age", "86400")
143
139
144
140
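A sketch of where add_cors_headers would be applied, e.g. from a hypothetical Pyramid OPTIONS preflight view:

    def cors_preflight_view(request):
        # decorate the outgoing response for cross-origin API clients
        response = request.response
        add_cors_headers(response)
        return response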
145 from sqlalchemy.sql import compiler
141 from sqlalchemy.sql import compiler
@@ -148,7 +144,6 b' from psycopg2.extensions import adapt as sqlescape'
148
144
149 # or use the appropriate escape function from your db driver
145 # or use the appropriate escape function from your db driver
150
146
151
152 def compile_query(query):
147 def compile_query(query):
153 dialect = query.session.bind.dialect
148 dialect = query.session.bind.dialect
154 statement = query.statement
149 statement = query.statement
@@ -170,23 +165,22 b' def convert_es_type(input_data):'
170 return str(input_data)
165 return str(input_data)
171
166
172
167
173 ProtoVersion = namedtuple("ProtoVersion", ["major", "minor", "patch"])
168 ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch'])
174
169
175
170
176 def parse_proto(input_data):
171 def parse_proto(input_data):
177 try:
172 try:
178 parts = [int(x) for x in input_data.split(".")]
173 parts = [int(x) for x in input_data.split('.')]
179 while len(parts) < 3:
174 while len(parts) < 3:
180 parts.append(0)
175 parts.append(0)
181 return ProtoVersion(*parts)
176 return ProtoVersion(*parts)
182 except Exception as e:
177 except Exception as e:
183 log.info("Unknown protocol version: %s" % e)
178 log.info('Unknown protocol version: %s' % e)
184 return ProtoVersion(99, 99, 99)
179 return ProtoVersion(99, 99, 99)
185
180
186
181
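parse_proto pads short version strings with zeros and falls back to 99.99.99 on garbage, so callers can always compare tuples:

    parse_proto("0.5")      # -> ProtoVersion(major=0, minor=5, patch=0)
    parse_proto("1.2.3")    # -> ProtoVersion(major=1, minor=2, patch=3)
    parse_proto("unknown")  # -> ProtoVersion(major=99, minor=99, patch=99)
    # namedtuples compare like plain tuples
    parse_proto("0.5") >= ProtoVersion(0, 5, 0)  # -> True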
187 def es_index_name_limiter(
182 def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6,
188 start_date=None, end_date=None, months_in_past=6, ixtypes=None
183 ixtypes=None):
189 ):
190 """
184 """
191 This function limits the search to 6 months by default so we don't have to
185 This function limits the search to 6 months by default so we don't have to
192 query, for example, 300 elasticsearch indices for 20 years of historical data
186 query, for example, 300 elasticsearch indices for 20 years of historical data
@@ -194,23 +188,23 b' def es_index_name_limiter('
194
188
195 # should be cached later
189 # should be cached later
196 def get_possible_names():
190 def get_possible_names():
197 return list(Datastores.es.indices.get_alias("*"))
191 return list(Datastores.es.aliases().keys())
198
192
199 possible_names = get_possible_names()
193 possible_names = get_possible_names()
200 es_index_types = []
194 es_index_types = []
201 if not ixtypes:
195 if not ixtypes:
202 ixtypes = ["reports", "metrics", "logs"]
196 ixtypes = ['reports', 'metrics', 'logs']
203 for t in ixtypes:
197 for t in ixtypes:
204 if t == "reports":
198 if t == 'reports':
205 es_index_types.append("rcae_r_%s")
199 es_index_types.append('rcae_r_%s')
206 elif t == "logs":
200 elif t == 'logs':
207 es_index_types.append("rcae_l_%s")
201 es_index_types.append('rcae_l_%s')
208 elif t == "metrics":
202 elif t == 'metrics':
209 es_index_types.append("rcae_m_%s")
203 es_index_types.append('rcae_m_%s')
210 elif t == "uptime":
204 elif t == 'uptime':
211 es_index_types.append("rcae_uptime_ce_%s")
205 es_index_types.append('rcae_u_%s')
212 elif t == "slow_calls":
206 elif t == 'slow_calls':
213 es_index_types.append("rcae_sc_%s")
207 es_index_types.append('rcae_sc_%s')
214
208
215 if start_date:
209 if start_date:
216 start_date = copy.copy(start_date)
210 start_date = copy.copy(start_date)
@@ -222,34 +216,26 b' def es_index_name_limiter('
222 if not end_date:
216 if not end_date:
223 end_date = start_date + relativedelta(months=months_in_past)
217 end_date = start_date + relativedelta(months=months_in_past)
224
218
225 index_dates = list(
219 index_dates = list(rrule(MONTHLY,
226 rrule(
220 dtstart=start_date.date().replace(day=1),
227 MONTHLY,
221 until=end_date.date(),
228 dtstart=start_date.date().replace(day=1),
222 count=36))
229 until=end_date.date(),
230 count=36,
231 )
232 )
233 index_names = []
223 index_names = []
234 for ix_type in es_index_types:
224 for ix_type in es_index_types:
235 to_extend = [
225 to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates
236 ix_type % d.strftime("%Y_%m")
226 if ix_type % d.strftime('%Y_%m') in possible_names]
237 for d in index_dates
238 if ix_type % d.strftime("%Y_%m") in possible_names
239 ]
240 index_names.extend(to_extend)
227 index_names.extend(to_extend)
241 for day in list(
228 for day in list(rrule(DAILY, dtstart=start_date.date(),
242 rrule(DAILY, dtstart=start_date.date(), until=end_date.date(), count=366)
229 until=end_date.date(), count=366)):
243 ):
230 ix_name = ix_type % day.strftime('%Y_%m_%d')
244 ix_name = ix_type % day.strftime("%Y_%m_%d")
245 if ix_name in possible_names:
231 if ix_name in possible_names:
246 index_names.append(ix_name)
232 index_names.append(ix_name)
247 return index_names
233 return index_names
248
234
249
235
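A sketch of the intended call with hypothetical dates; names are only returned for indices that actually exist in Elasticsearch:

    from datetime import datetime

    names = es_index_name_limiter(
        start_date=datetime(2016, 5, 15),
        end_date=datetime(2016, 6, 15),
        ixtypes=["reports"],
    )
    # -> e.g. ["rcae_r_2016_05", "rcae_r_2016_06"] when those indices exist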
250 def build_filter_settings_from_query_dict(
236 def build_filter_settings_from_query_dict(
251 request, params=None, override_app_ids=None, resource_permissions=None
237 request, params=None, override_app_ids=None,
252 ):
238 resource_permissions=None):
253 """
239 """
254 Builds list of normalized search terms for ES from query params
240 Builds list of normalized search terms for ES from query params
255 ensuring application list is restricted to only applications user
241 ensuring application list is restricted to only applications user
@@ -262,12 +248,11 b' def build_filter_settings_from_query_dict('
262 params = copy.deepcopy(params)
248 params = copy.deepcopy(params)
263 applications = []
249 applications = []
264 if not resource_permissions:
250 if not resource_permissions:
265 resource_permissions = ["view"]
251 resource_permissions = ['view']
266
252
267 if request.user:
253 if request.user:
268 applications = UserService.resources_with_perms(
254 applications = request.user.resources_with_perms(
269 request.user, resource_permissions, resource_types=["application"]
255 resource_permissions, resource_types=['application'])
270 )
271
256
272 # CRITICAL - this ensures our resultset is limited to only the ones
257 # CRITICAL - this ensures our resultset is limited to only the ones
273 # the user has view permissions for
258 # the user has view permissions for
@@ -287,11 +272,11 b' def build_filter_settings_from_query_dict('
287 for k, v in list(filter_settings.items()):
272 for k, v in list(filter_settings.items()):
288 if k in accepted_search_params:
273 if k in accepted_search_params:
289 continue
274 continue
290 tag_list.append({"name": k, "value": v, "op": "eq"})
275 tag_list.append({"name": k, "value": v, "op": 'eq'})
291 # remove the key from filter_settings
276 # remove the key from filter_settings
292 filter_settings.pop(k, None)
277 filter_settings.pop(k, None)
293 tags = tag_schema.deserialize(tag_list)
278 tags = tag_schema.deserialize(tag_list)
294 filter_settings["tags"] = tags
279 filter_settings['tags'] = tags
295 return filter_settings
280 return filter_settings
296
281
297
282
@@ -313,36 +298,26 b' def permission_tuple_to_dict(data):'
313 "resource_type": None,
298 "resource_type": None,
314 "resource_id": None,
299 "resource_id": None,
315 "group_name": None,
300 "group_name": None,
316 "group_id": None,
301 "group_id": None
317 }
302 }
318 if data.user:
303 if data.user:
319 out["user_name"] = data.user.user_name
304 out["user_name"] = data.user.user_name
320 if data.perm_name == ALL_PERMISSIONS:
305 if data.perm_name == ALL_PERMISSIONS:
321 out["perm_name"] = "__all_permissions__"
306 out['perm_name'] = '__all_permissions__'
322 if data.resource:
307 if data.resource:
323 out["resource_name"] = data.resource.resource_name
308 out['resource_name'] = data.resource.resource_name
324 out["resource_type"] = data.resource.resource_type
309 out['resource_type'] = data.resource.resource_type
325 out["resource_id"] = data.resource.resource_id
310 out['resource_id'] = data.resource.resource_id
326 if data.group:
311 if data.group:
327 out["group_name"] = data.group.group_name
312 out['group_name'] = data.group.group_name
328 out["group_id"] = data.group.id
313 out['group_id'] = data.group.id
329 return out
314 return out
330
315
331
316
332 def get_cached_buckets(
317 def get_cached_buckets(request, stats_since, end_time, fn, cache_key,
333 request,
318 gap_gen=None, db_session=None, step_interval=None,
334 stats_since,
319 iv_extractor=None,
335 end_time,
320 rerange=False, *args, **kwargs):
336 fn,
337 cache_key,
338 gap_gen=None,
339 db_session=None,
340 step_interval=None,
341 iv_extractor=None,
342 rerange=False,
343 *args,
344 **kwargs
345 ):
346 """ Takes "fn" that should return some data and tries to load the data
321 """ Takes "fn" that should return some data and tries to load the data
347 dividing it into daily buckets - if the stats_since and end time give a
322 dividing it into daily buckets - if the stats_since and end time give a
348 delta bigger than 24 hours, then only "today's" data is computed on the fly
323 delta bigger than 24 hours, then only "today's" data is computed on the fly
@@ -384,28 +359,25 b' def get_cached_buckets('
384 # do not use custom interval if total time range with new iv would exceed
359 # do not use custom interval if total time range with new iv would exceed
385 # end time
360 # end time
386 if not step_interval or stats_since + step_interval >= end_time:
361 if not step_interval or stats_since + step_interval >= end_time:
387 if delta < h.time_deltas.get("12h")["delta"]:
362 if delta < h.time_deltas.get('12h')['delta']:
388 step_interval = timedelta(seconds=60)
363 step_interval = timedelta(seconds=60)
389 elif delta < h.time_deltas.get("3d")["delta"]:
364 elif delta < h.time_deltas.get('3d')['delta']:
390 step_interval = timedelta(seconds=60 * 5)
365 step_interval = timedelta(seconds=60 * 5)
391 elif delta > h.time_deltas.get("2w")["delta"]:
366 elif delta > h.time_deltas.get('2w')['delta']:
392 step_interval = timedelta(days=1)
367 step_interval = timedelta(days=1)
393 else:
368 else:
394 step_interval = timedelta(minutes=60)
369 step_interval = timedelta(minutes=60)
395
370
396 if step_interval >= timedelta(minutes=60):
371 if step_interval >= timedelta(minutes=60):
397 log.info(
372 log.info('cached_buckets:{}: adjusting start time '
398 "cached_buckets:{}: adjusting start time "
373 'for hourly or daily intervals'.format(cache_key))
399 "for hourly or daily intervals".format(cache_key)
400 )
401 stats_since = stats_since.replace(hour=0, minute=0)
374 stats_since = stats_since.replace(hour=0, minute=0)
402
375
403 ranges = [
376 ranges = [i.start_interval for i in list(gap_gen(stats_since,
404 i.start_interval
377 step_interval, [],
405 for i in list(gap_gen(stats_since, step_interval, [], end_time=end_time))
378 end_time=end_time))]
406 ]
407 buckets = {}
379 buckets = {}
408 storage_key = "buckets:" + cache_key + "{}|{}"
380 storage_key = 'buckets:' + cache_key + '{}|{}'
409 # this means we basically cache per hour in 3-14 day intervals but i think
381 # this means we basically cache per hour in 3-14 day intervals but i think
410 # it's fine at this point - will be faster than db access anyway
382 # it's fine at this point - will be faster than db access anyway
411
383
@@ -418,67 +390,45 b' def get_cached_buckets('
418 k = storage_key.format(step_interval.total_seconds(), r)
390 k = storage_key.format(step_interval.total_seconds(), r)
419 value = request.registry.cache_regions.redis_day_30.get(k)
391 value = request.registry.cache_regions.redis_day_30.get(k)
420 # last buckets are never loaded from cache
392 # last buckets are never loaded from cache
421 is_last_result = r >= end_time - timedelta(hours=6) or r in last_ranges
393 is_last_result = (
394 r >= end_time - timedelta(hours=6) or r in last_ranges)
422 if value is not NO_VALUE and not is_last_result:
395 if value is not NO_VALUE and not is_last_result:
423 log.info(
396 log.info("cached_buckets:{}: "
424 "cached_buckets:{}: "
397 "loading range {} from cache".format(cache_key, r))
425 "loading range {} from cache".format(cache_key, r)
426 )
427 buckets[r] = value
398 buckets[r] = value
428 else:
399 else:
429 log.info(
400 log.info("cached_buckets:{}: "
430 "cached_buckets:{}: "
401 "loading range {} from storage".format(cache_key, r))
431 "loading range {} from storage".format(cache_key, r)
432 )
433 range_size = step_interval
402 range_size = step_interval
434 if (
403 if (step_interval == timedelta(minutes=60) and
435 step_interval == timedelta(minutes=60)
404 not is_last_result and rerange):
436 and not is_last_result
437 and rerange
438 ):
439 range_size = timedelta(days=1)
405 range_size = timedelta(days=1)
440 r = r.replace(hour=0, minute=0)
406 r = r.replace(hour=0, minute=0)
441 log.info(
407 log.info("cached_buckets:{}: "
442 "cached_buckets:{}: "
408 "loading collapsed "
443 "loading collapsed "
409 "range {} {}".format(cache_key, r,
444 "range {} {}".format(cache_key, r, r + range_size)
410 r + range_size))
445 )
446 bucket_data = fn(
411 bucket_data = fn(
447 request,
412 request, r, r + range_size, step_interval,
448 r,
413 gap_gen, bucket_count=len(ranges), *args, **kwargs)
449 r + range_size,
450 step_interval,
451 gap_gen,
452 bucket_count=len(ranges),
453 *args,
454 **kwargs
455 )
456 for b in bucket_data:
414 for b in bucket_data:
457 b_iv = iv_extractor(b)
415 b_iv = iv_extractor(b)
458 buckets[b_iv] = b
416 buckets[b_iv] = b
459 k2 = storage_key.format(step_interval.total_seconds(), b_iv)
417 k2 = storage_key.format(
418 step_interval.total_seconds(), b_iv)
460 request.registry.cache_regions.redis_day_30.set(k2, b)
419 request.registry.cache_regions.redis_day_30.set(k2, b)
461 log.info("cached_buckets:{}: saving cache".format(cache_key))
420 log.info("cached_buckets:{}: saving cache".format(cache_key))
462 else:
421 else:
463 # bucket count is 1 for short time ranges <= 24h from now
422 # bucket count is 1 for short time ranges <= 24h from now
464 bucket_data = fn(
423 bucket_data = fn(request, stats_since, end_time, step_interval,
465 request,
424 gap_gen, bucket_count=1, *args, **kwargs)
466 stats_since,
467 end_time,
468 step_interval,
469 gap_gen,
470 bucket_count=1,
471 *args,
472 **kwargs
473 )
474 for b in bucket_data:
425 for b in bucket_data:
475 buckets[iv_extractor(b)] = b
426 buckets[iv_extractor(b)] = b
476 return buckets
427 return buckets
477
428
478
429
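A sketch of the contract get_cached_buckets imposes on fn, matching the positional call sites above; the stats function and cache key are hypothetical, and request/stats_since/end_time are assumed to be in scope (e.g. inside a view):

    def fetch_report_stats(request, since_when, until, step_interval,
                           gap_gen, bucket_count=None):
        # must return rows carrying a start_interval, per default_extractor
        return []

    buckets = get_cached_buckets(
        request, stats_since, end_time, fetch_report_stats,
        cache_key="reports:app_42",
        gap_gen=gap_gen_default,
        iv_extractor=default_extractor,
    )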
479 def get_cached_split_data(
430 def get_cached_split_data(request, stats_since, end_time, fn, cache_key,
480 request, stats_since, end_time, fn, cache_key, db_session=None, *args, **kwargs
431 db_session=None, *args, **kwargs):
481 ):
482 """ Takes "fn" that should return some data and tries to load the data
432 """ Takes "fn" that should return some data and tries to load the data
483 dividing it into 2 buckets - cached "since_from" bucket and "today"
433 dividing it into 2 buckets - cached "since_from" bucket and "today"
484 bucket - then the data can be reduced into a single value
434 bucket - then the data can be reduced into a single value
@@ -491,51 +441,43 b' def get_cached_split_data('
491 delta = end_time - stats_since
441 delta = end_time - stats_since
492
442
493 if delta >= timedelta(minutes=60):
443 if delta >= timedelta(minutes=60):
494 log.info(
444 log.info('cached_split_data:{}: adjusting start time '
495 "cached_split_data:{}: adjusting start time "
445 'for hourly or daily intervals'.format(cache_key))
496 "for hourly or daily intervals".format(cache_key)
497 )
498 stats_since = stats_since.replace(hour=0, minute=0)
446 stats_since = stats_since.replace(hour=0, minute=0)
499
447
500 storage_key = "buckets_split_data:" + cache_key + ":{}|{}"
448 storage_key = 'buckets_split_data:' + cache_key + ':{}|{}'
501 old_end_time = end_time.replace(hour=0, minute=0)
449 old_end_time = end_time.replace(hour=0, minute=0)
502
450
503 final_storage_key = storage_key.format(delta.total_seconds(), old_end_time)
451 final_storage_key = storage_key.format(delta.total_seconds(),
452 old_end_time)
504 older_data = None
453 older_data = None
505
454
506 cdata = request.registry.cache_regions.redis_day_7.get(final_storage_key)
455 cdata = request.registry.cache_regions.redis_day_7.get(
456 final_storage_key)
507
457
508 if cdata:
458 if cdata:
509 log.info("cached_split_data:{}: found old " "bucket data".format(cache_key))
459 log.info("cached_split_data:{}: found old "
460 "bucket data".format(cache_key))
510 older_data = cdata
461 older_data = cdata
511
462
512 if stats_since < end_time - h.time_deltas.get("24h")["delta"] and not cdata:
463 if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and
513 log.info(
464 not cdata):
514 "cached_split_data:{}: didn't find the "
465 log.info("cached_split_data:{}: didn't find the "
515 "start bucket in cache so load older data".format(cache_key)
466 "start bucket in cache so load older data".format(cache_key))
516 )
517 recent_stats_since = old_end_time
467 recent_stats_since = old_end_time
518 older_data = fn(
468 older_data = fn(request, stats_since, recent_stats_since,
519 request,
469 db_session=db_session, *args, **kwargs)
520 stats_since,
470 request.registry.cache_regions.redis_day_7.set(final_storage_key,
521 recent_stats_since,
471 older_data)
522 db_session=db_session,
472 elif stats_since < end_time - h.time_deltas.get('24h')['delta']:
523 *args,
524 **kwargs
525 )
526 request.registry.cache_regions.redis_day_7.set(final_storage_key, older_data)
527 elif stats_since < end_time - h.time_deltas.get("24h")["delta"]:
528 recent_stats_since = old_end_time
473 recent_stats_since = old_end_time
529 else:
474 else:
530 recent_stats_since = stats_since
475 recent_stats_since = stats_since
531
476
532 log.info(
477 log.info("cached_split_data:{}: loading fresh "
533 "cached_split_data:{}: loading fresh "
478 "data bucksts from last 24h ".format(cache_key))
534 "data bucksts from last 24h ".format(cache_key)
479 todays_data = fn(request, recent_stats_since, end_time,
535 )
480 db_session=db_session, *args, **kwargs)
536 todays_data = fn(
537 request, recent_stats_since, end_time, db_session=db_session, *args, **kwargs
538 )
539 return older_data, todays_data
481 return older_data, todays_data
540
482
541
483
@@ -545,16 +487,4 b' def in_batches(seq, size):'
545 :param seq (iterable)
487 :param seq (iterable)
546 :param size integer
488 :param size integer
547 """
489 """
548 return (seq[pos : pos + size] for pos in range(0, len(seq), size))
490 return (seq[pos:pos + size] for pos in range(0, len(seq), size))
549
550
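in_batches is lazy - it yields slices on demand, so materialize with list() when needed:

    list(in_batches([1, 2, 3, 4, 5], size=2))  # -> [[1, 2], [3, 4], [5]]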
551 def get_es_info(cache_regions, es_conn):
552 @cache_regions.memory_min_10.cache_on_arguments()
553 def get_es_info_cached():
554 returned_info = {"raw_info": es_conn.info()}
555 returned_info["version"] = returned_info["raw_info"]["version"]["number"].split(
556 "."
557 )
558 return returned_info
559
560 return get_es_info_cached()
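The memoized helper returns the raw info() payload plus a pre-split version list; a sketch of the intended use (the version check itself is hypothetical):

    info = get_es_info(request.registry.cache_regions, Datastores.es)
    # info["version"] is e.g. ["5", "6", "3"], split from "5.6.3"
    if int(info["version"][0]) >= 5:
        pass  # e.g. branch to a newer query DSL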
@@ -24,138 +24,119 b' log = logging.getLogger(__name__)'
24
24
25 def parse_airbrake_xml(request):
25 def parse_airbrake_xml(request):
26 root = request.context.airbrake_xml_etree
26 root = request.context.airbrake_xml_etree
27 error = root.find("error")
27 error = root.find('error')
28 notifier = root.find("notifier")
28 notifier = root.find('notifier')
29 server_env = root.find("server-environment")
29 server_env = root.find('server-environment')
30 request_data = root.find("request")
30 request_data = root.find('request')
31 user = root.find("current-user")
31 user = root.find('current-user')
32 if request_data is not None:
32 if request_data is not None:
33 cgi_data = request_data.find("cgi-data")
33 cgi_data = request_data.find('cgi-data')
34 if cgi_data is None:
34 if cgi_data is None:
35 cgi_data = []
35 cgi_data = []
36
36
37 error_dict = {
37 error_dict = {
38 "class_name": error.findtext("class") or "",
38 'class_name': error.findtext('class') or '',
39 "error": error.findtext("message") or "",
39 'error': error.findtext('message') or '',
40 "occurences": 1,
40 "occurences": 1,
41 "http_status": 500,
41 "http_status": 500,
42 "priority": 5,
42 "priority": 5,
43 "server": "unknown",
43 "server": 'unknown',
44 "url": "unknown",
44 'url': 'unknown', 'request': {}
45 "request": {},
46 }
45 }
47 if user is not None:
46 if user is not None:
48 error_dict["username"] = user.findtext("username") or user.findtext("id")
47 error_dict['username'] = user.findtext('username') or \
48 user.findtext('id')
49 if notifier is not None:
49 if notifier is not None:
50 error_dict["client"] = notifier.findtext("name")
50 error_dict['client'] = notifier.findtext('name')
51
51
52 if server_env is not None:
52 if server_env is not None:
53 error_dict["server"] = server_env.findtext("hostname", "unknown")
53 error_dict["server"] = server_env.findtext('hostname', 'unknown')
54
54
55 whitelist_environ = [
55 whitelist_environ = ['REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME',
56 "REMOTE_USER",
56 'CONTENT_TYPE', 'HTTP_REFERER']
57 "REMOTE_ADDR",
58 "SERVER_NAME",
59 "CONTENT_TYPE",
60 "HTTP_REFERER",
61 ]
62
57
63 if request_data is not None:
58 if request_data is not None:
64 error_dict["url"] = request_data.findtext("url", "unknown")
59 error_dict['url'] = request_data.findtext('url', 'unknown')
65 component = request_data.findtext("component")
60 component = request_data.findtext('component')
66 action = request_data.findtext("action")
61 action = request_data.findtext('action')
67 if component and action:
62 if component and action:
68 error_dict["view_name"] = "%s:%s" % (component, action)
63 error_dict['view_name'] = '%s:%s' % (component, action)
69 for node in cgi_data:
64 for node in cgi_data:
70 key = node.get("key")
65 key = node.get('key')
71 if key.startswith("HTTP") or key in whitelist_environ:
66 if key.startswith('HTTP') or key in whitelist_environ:
72 error_dict["request"][key] = node.text
67 error_dict['request'][key] = node.text
73 elif "query_parameters" in key:
68 elif 'query_parameters' in key:
74 error_dict["request"]["GET"] = {}
69 error_dict['request']['GET'] = {}
75 for x in node:
70 for x in node:
76 error_dict["request"]["GET"][x.get("key")] = x.text
71 error_dict['request']['GET'][x.get('key')] = x.text
77 elif "request_parameters" in key:
72 elif 'request_parameters' in key:
78 error_dict["request"]["POST"] = {}
73 error_dict['request']['POST'] = {}
79 for x in node:
74 for x in node:
80 error_dict["request"]["POST"][x.get("key")] = x.text
75 error_dict['request']['POST'][x.get('key')] = x.text
81 elif key.endswith("cookie"):
76 elif key.endswith('cookie'):
82 error_dict["request"]["COOKIE"] = {}
77 error_dict['request']['COOKIE'] = {}
83 for x in node:
78 for x in node:
84 error_dict["request"]["COOKIE"][x.get("key")] = x.text
79 error_dict['request']['COOKIE'][x.get('key')] = x.text
85 elif key.endswith("request_id"):
80 elif key.endswith('request_id'):
86 error_dict["request_id"] = node.text
81 error_dict['request_id'] = node.text
87 elif key.endswith("session"):
82 elif key.endswith('session'):
88 error_dict["request"]["SESSION"] = {}
83 error_dict['request']['SESSION'] = {}
89 for x in node:
84 for x in node:
90 error_dict["request"]["SESSION"][x.get("key")] = x.text
85 error_dict['request']['SESSION'][x.get('key')] = x.text
91 else:
86 else:
92 if key in ["rack.session.options"]:
87 if key in ['rack.session.options']:
93 # skip secret configs
88 # skip secret configs
94 continue
89 continue
95 try:
90 try:
96 if len(node):
91 if len(node):
97 error_dict["request"][key] = dict(
92 error_dict['request'][key] = dict(
98 [(x.get("key"), x.text) for x in node]
93 [(x.get('key'), x.text,) for x in node])
99 )
100 else:
94 else:
101 error_dict["request"][key] = node.text
95 error_dict['request'][key] = node.text
102 except Exception as e:
96 except Exception as e:
103 log.warning("Airbrake integration exception: %s" % e)
97 log.warning('Airbrake integration exception: %s' % e)
104
98
105 error_dict["request"].pop("HTTP_COOKIE", "")
99 error_dict['request'].pop('HTTP_COOKIE', '')
106
100
107 error_dict["ip"] = error_dict.pop("REMOTE_ADDR", "")
101 error_dict['ip'] = error_dict.pop('REMOTE_ADDR', '')
108 error_dict["user_agent"] = error_dict.pop("HTTP_USER_AGENT", "")
102 error_dict['user_agent'] = error_dict.pop('HTTP_USER_AGENT', '')
109 if "request_id" not in error_dict:
103 if 'request_id' not in error_dict:
110 error_dict["request_id"] = str(uuid.uuid4())
104 error_dict['request_id'] = str(uuid.uuid4())
111 if request.context.possibly_public:
105 if request.context.possibly_public:
112 # set ip for reports that come from airbrake js client
106 # set ip for reports that come from airbrake js client
113 error_dict["timestamp"] = datetime.utcnow()
107 error_dict["timestamp"] = datetime.utcnow()
114 if request.environ.get("HTTP_X_FORWARDED_FOR"):
108 if request.environ.get("HTTP_X_FORWARDED_FOR"):
115 ip = request.environ.get("HTTP_X_FORWARDED_FOR", "")
109 ip = request.environ.get("HTTP_X_FORWARDED_FOR", '')
116 first_ip = ip.split(",")[0]
110 first_ip = ip.split(',')[0]
117 remote_addr = first_ip.strip()
111 remote_addr = first_ip.strip()
118 else:
112 else:
119 remote_addr = request.environ.get("HTTP_X_REAL_IP") or request.environ.get(
113 remote_addr = (request.environ.get("HTTP_X_REAL_IP") or
120 "REMOTE_ADDR"
114 request.environ.get('REMOTE_ADDR'))
121 )
122 error_dict["ip"] = remote_addr
115 error_dict["ip"] = remote_addr
123
116
124 blacklist = [
117 blacklist = ['password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf',
125 "password",
118 'session', 'test']
126 "passwd",
127 "pwd",
128 "auth_tkt",
129 "secret",
130 "csrf",
131 "session",
132 "test",
133 ]
134
119
135 lines = []
120 lines = []
136 for l in error.find("backtrace"):
121 for l in error.find('backtrace'):
137 lines.append(
122 lines.append({'file': l.get("file", ""),
138 {
123 'line': l.get("number", ""),
139 "file": l.get("file", ""),
124 'fn': l.get("method", ""),
140 "line": l.get("number", ""),
125 'module': l.get("module", ""),
141 "fn": l.get("method", ""),
126 'cline': l.get("method", ""),
142 "module": l.get("module", ""),
127 'vars': {}})
143 "cline": l.get("method", ""),
128 error_dict['traceback'] = list(reversed(lines))
144 "vars": {},
145 }
146 )
147 error_dict["traceback"] = list(reversed(lines))
148 # filtering is not provided by airbrake
129 # filtering is not provided by airbrake
149 keys_to_check = (
130 keys_to_check = (
150 error_dict["request"].get("COOKIE"),
131 error_dict['request'].get('COOKIE'),
151 error_dict["request"].get("COOKIES"),
132 error_dict['request'].get('COOKIES'),
152 error_dict["request"].get("POST"),
133 error_dict['request'].get('POST'),
153 error_dict["request"].get("SESSION"),
134 error_dict['request'].get('SESSION'),
154 )
135 )
155 for source in [_f for _f in keys_to_check if _f]:
136 for source in [_f for _f in keys_to_check if _f]:
156 for k in source.keys():
137 for k in source.keys():
157 for bad_key in blacklist:
138 for bad_key in blacklist:
158 if bad_key in k.lower():
139 if bad_key in k.lower():
159 source[k] = "***"
140 source[k] = '***'
160
141
161 return error_dict
142 return error_dict
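The blacklist pass at the end boils down to a substring scrub over the collected dicts; a self-contained sketch:

    blacklist = ["password", "passwd", "pwd", "auth_tkt", "secret", "csrf",
                 "session", "test"]
    source = {"password": "hunter2", "page": "1"}
    for k in list(source.keys()):
        # mask any key that merely contains a blacklisted word
        if any(bad_key in k.lower() for bad_key in blacklist):
            source[k] = "***"
    # source == {"password": "***", "page": "1"}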
@@ -22,12 +22,12 b' log = logging.getLogger(__name__)'
22
22
23
23
24 def to_relativedelta(time_delta):
24 def to_relativedelta(time_delta):
25 return relativedelta(
25 return relativedelta(seconds=int(time_delta.total_seconds()),
26 seconds=int(time_delta.total_seconds()), microseconds=time_delta.microseconds
26 microseconds=time_delta.microseconds)
27 )
28
27
29
28
30 def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):
29 def convert_date(date_str, return_utcnow_if_wrong=True,
30 normalize_future=False):
31 utcnow = datetime.utcnow()
31 utcnow = datetime.utcnow()
32 if isinstance(date_str, datetime):
32 if isinstance(date_str, datetime):
33 # get rid of tzinfo
33 # get rid of tzinfo
@@ -36,21 +36,21 b' def convert_date(date_str, return_utcnow_if_wrong=True, normalize_future=False):'
36 return utcnow
36 return utcnow
37 try:
37 try:
38 try:
38 try:
39 if "Z" in date_str:
39 if 'Z' in date_str:
40 date_str = date_str[: date_str.index("Z")]
40 date_str = date_str[:date_str.index('Z')]
41 if "." in date_str:
41 if '.' in date_str:
42 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f")
42 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f')
43 else:
43 else:
44 date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
44 date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S')
45 except Exception:
45 except Exception:
46 # bw compat with old client
46 # bw compat with old client
47 date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
47 date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S,%f')
48 except Exception:
48 except Exception:
49 if return_utcnow_if_wrong:
49 if return_utcnow_if_wrong:
50 date = utcnow
50 date = utcnow
51 else:
51 else:
52 date = None
52 date = None
53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
53 if normalize_future and date and date > (utcnow + timedelta(minutes=3)):
54 log.warning("time %s in future + 3 min, normalizing" % date)
54 log.warning('time %s in future + 3 min, normalizing' % date)
55 return utcnow
55 return utcnow
56 return date
56 return date
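The accepted formats and fallbacks, per the branches above:

    convert_date("2016-06-29T10:00:00Z")         # ISO with trailing Z
    convert_date("2016-06-29T10:00:00.123456Z")  # ISO with microseconds
    convert_date("2016-06-29 10:00:00,123")      # old-client comma format
    convert_date("garbage")                      # -> datetime.utcnow()
    convert_date("garbage", return_utcnow_if_wrong=False)  # -> None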
@@ -19,68 +19,45 b' from datetime import timedelta'
19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
19 from appenlight.lib.enums import LogLevelPython, ParsedSentryEventType
20
20
21 EXCLUDED_LOG_VARS = [
21 EXCLUDED_LOG_VARS = [
22 "args",
22 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
23 "asctime",
23 'funcName', 'levelname', 'levelno', 'lineno', 'message', 'module', 'msecs',
24 "created",
24 'msg', 'name', 'pathname', 'process', 'processName', 'relativeCreated',
25 "exc_info",
25 'thread', 'threadName']
26 "exc_text",
27 "filename",
28 "funcName",
29 "levelname",
30 "levelno",
31 "lineno",
32 "message",
33 "module",
34 "msecs",
35 "msg",
36 "name",
37 "pathname",
38 "process",
39 "processName",
40 "relativeCreated",
41 "thread",
42 "threadName",
43 ]
44
26
45 EXCLUDE_SENTRY_KEYS = [
27 EXCLUDE_SENTRY_KEYS = [
46 "csp",
28 'csp',
47 "culprit",
29 'culprit',
48 "event_id",
30 'event_id',
49 "exception",
31 'exception',
50 "extra",
32 'extra',
51 "level",
33 'level',
52 "logentry",
34 'logentry',
53 "logger",
35 'logger',
54 "message",
36 'message',
55 "modules",
37 'modules',
56 "platform",
38 'platform',
57 "query",
39 'query',
58 "release",
40 'release',
59 "request",
41 'request',
60 "sentry.interfaces.Csp",
42 'sentry.interfaces.Csp', 'sentry.interfaces.Exception',
61 "sentry.interfaces.Exception",
43 'sentry.interfaces.Http', 'sentry.interfaces.Message',
62 "sentry.interfaces.Http",
44 'sentry.interfaces.Query',
63 "sentry.interfaces.Message",
45 'sentry.interfaces.Stacktrace',
64 "sentry.interfaces.Query",
46 'sentry.interfaces.Template', 'sentry.interfaces.User',
65 "sentry.interfaces.Stacktrace",
47 'sentry.interfaces.csp.Csp',
66 "sentry.interfaces.Template",
48 'sentry.interfaces.exception.Exception',
67 "sentry.interfaces.User",
49 'sentry.interfaces.http.Http',
68 "sentry.interfaces.csp.Csp",
50 'sentry.interfaces.message.Message',
69 "sentry.interfaces.exception.Exception",
51 'sentry.interfaces.query.Query',
70 "sentry.interfaces.http.Http",
52 'sentry.interfaces.stacktrace.Stacktrace',
71 "sentry.interfaces.message.Message",
53 'sentry.interfaces.template.Template',
72 "sentry.interfaces.query.Query",
54 'sentry.interfaces.user.User', 'server_name',
73 "sentry.interfaces.stacktrace.Stacktrace",
55 'stacktrace',
74 "sentry.interfaces.template.Template",
56 'tags',
75 "sentry.interfaces.user.User",
57 'template',
76 "server_name",
58 'time_spent',
77 "stacktrace",
59 'timestamp',
78 "tags",
60 'user']
79 "template",
80 "time_spent",
81 "timestamp",
82 "user",
83 ]
84
61
85
62
86 def get_keys(list_of_keys, json_body):
63 def get_keys(list_of_keys, json_body):
@@ -90,32 +67,30 b' def get_keys(list_of_keys, json_body):'
90
67
91
68
92 def get_logentry(json_body):
69 def get_logentry(json_body):
93 key_names = [
70 key_names = ['logentry',
94 "logentry",
71 'sentry.interfaces.message.Message',
95 "sentry.interfaces.message.Message",
72 'sentry.interfaces.Message'
96 "sentry.interfaces.Message",
73 ]
97 ]
98 logentry = get_keys(key_names, json_body)
74 logentry = get_keys(key_names, json_body)
99 return logentry
75 return logentry
100
76
101
77
102 def get_exception(json_body):
78 def get_exception(json_body):
103 parsed_exception = {}
79 parsed_exception = {}
104 key_names = [
80 key_names = ['exception',
105 "exception",
81 'sentry.interfaces.exception.Exception',
106 "sentry.interfaces.exception.Exception",
82 'sentry.interfaces.Exception'
107 "sentry.interfaces.Exception",
83 ]
108 ]
109 exception = get_keys(key_names, json_body) or {}
84 exception = get_keys(key_names, json_body) or {}
110 if exception:
85 if exception:
111 if isinstance(exception, dict):
86 if isinstance(exception, dict):
112 exception = exception["values"][0]
87 exception = exception['values'][0]
113 else:
88 else:
114 exception = exception[0]
89 exception = exception[0]
115
90
116 parsed_exception["type"] = exception.get("type")
91 parsed_exception['type'] = exception.get('type')
117 parsed_exception["value"] = exception.get("value")
92 parsed_exception['value'] = exception.get('value')
118 parsed_exception["module"] = exception.get("module")
93 parsed_exception['module'] = exception.get('module')
119 parsed_stacktrace = get_stacktrace(exception) or {}
94 parsed_stacktrace = get_stacktrace(exception) or {}
120 parsed_exception = exception or {}
95 parsed_exception = exception or {}
121 return parsed_exception, parsed_stacktrace
96 return parsed_exception, parsed_stacktrace
@@ -123,45 +98,41 b' def get_exception(json_body):'
123
98
124 def get_stacktrace(json_body):
99 def get_stacktrace(json_body):
125 parsed_stacktrace = []
100 parsed_stacktrace = []
126 key_names = [
101 key_names = ['stacktrace',
127 "stacktrace",
102 'sentry.interfaces.stacktrace.Stacktrace',
128 "sentry.interfaces.stacktrace.Stacktrace",
103 'sentry.interfaces.Stacktrace'
129 "sentry.interfaces.Stacktrace",
104 ]
130 ]
131 stacktrace = get_keys(key_names, json_body)
105 stacktrace = get_keys(key_names, json_body)
132 if stacktrace:
106 if stacktrace:
133 for frame in stacktrace["frames"]:
107 for frame in stacktrace['frames']:
134 parsed_stacktrace.append(
108 parsed_stacktrace.append(
135 {
109 {"cline": frame.get('context_line', ''),
136 "cline": frame.get("context_line", ""),
110 "file": frame.get('filename', ''),
137 "file": frame.get("filename", ""),
111 "module": frame.get('module', ''),
138 "module": frame.get("module", ""),
112 "fn": frame.get('function', ''),
139 "fn": frame.get("function", ""),
113 "line": frame.get('lineno', ''),
140 "line": frame.get("lineno", ""),
114 "vars": list(frame.get('vars', {}).items())
141 "vars": list(frame.get("vars", {}).items()),
115 }
142 }
143 )
116 )
144 return parsed_stacktrace
117 return parsed_stacktrace
145
118
146
119
147 def get_template(json_body):
120 def get_template(json_body):
148 parsed_template = {}
121 parsed_template = {}
149 key_names = [
122 key_names = ['template',
150 "template",
123 'sentry.interfaces.template.Template',
151 "sentry.interfaces.template.Template",
124 'sentry.interfaces.Template'
152 "sentry.interfaces.Template",
125 ]
153 ]
154 template = get_keys(key_names, json_body)
126 template = get_keys(key_names, json_body)
155 if template:
127 if template:
156 for frame in template["frames"]:
128 for frame in template['frames']:
157 parsed_template.append(
129 parsed_template.append(
158 {
130 {"cline": frame.get('context_line', ''),
159 "cline": frame.get("context_line", ""),
131 "file": frame.get('filename', ''),
160 "file": frame.get("filename", ""),
132 "fn": '',
161 "fn": "",
133 "line": frame.get('lineno', ''),
162 "line": frame.get("lineno", ""),
134 "vars": []
163 "vars": [],
135 }
164 }
165 )
136 )
166
137
167 return parsed_template
138 return parsed_template
@@ -169,13 +140,16 b' def get_template(json_body):'
169
140
170 def get_request(json_body):
141 def get_request(json_body):
171 parsed_http = {}
142 parsed_http = {}
172 key_names = ["request", "sentry.interfaces.http.Http", "sentry.interfaces.Http"]
143 key_names = ['request',
144 'sentry.interfaces.http.Http',
145 'sentry.interfaces.Http'
146 ]
173 http = get_keys(key_names, json_body) or {}
147 http = get_keys(key_names, json_body) or {}
174 for k, v in http.items():
148 for k, v in http.items():
175 if k == "headers":
149 if k == 'headers':
176 parsed_http["headers"] = {}
150 parsed_http['headers'] = {}
177 for sk, sv in http["headers"].items():
151 for sk, sv in http['headers'].items():
178 parsed_http["headers"][sk.title()] = sv
152 parsed_http['headers'][sk.title()] = sv
179 else:
153 else:
180 parsed_http[k.lower()] = v
154 parsed_http[k.lower()] = v
181 return parsed_http
155 return parsed_http
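get_request normalizes casing both ways - header names are Title-Cased, every other key is lower-cased:

    get_request({
        "request": {
            "URL": "http://example.com/",
            "headers": {"user-agent": "test-ua"},
        }
    })
    # -> {"url": "http://example.com/", "headers": {"User-Agent": "test-ua"}}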
@@ -183,47 +157,53 b' def get_request(json_body):'
183
157
184 def get_user(json_body):
158 def get_user(json_body):
185 parsed_user = {}
159 parsed_user = {}
186 key_names = ["user", "sentry.interfaces.user.User", "sentry.interfaces.User"]
160 key_names = ['user',
161 'sentry.interfaces.user.User',
162 'sentry.interfaces.User'
163 ]
187 user = get_keys(key_names, json_body)
164 user = get_keys(key_names, json_body)
188 if user:
165 if user:
189 parsed_user["id"] = user.get("id")
166 parsed_user['id'] = user.get('id')
190 parsed_user["username"] = user.get("username")
167 parsed_user['username'] = user.get('username')
191 parsed_user["email"] = user.get("email")
168 parsed_user['email'] = user.get('email')
192 parsed_user["ip_address"] = user.get("ip_address")
169 parsed_user['ip_address'] = user.get('ip_address')
193
170
194 return parsed_user
171 return parsed_user
195
172
196
173
197 def get_query(json_body):
174 def get_query(json_body):
198 query = None
175 query = None
199 key_name = ["query", "sentry.interfaces.query.Query", "sentry.interfaces.Query"]
176 key_name = ['query',
177 'sentry.interfaces.query.Query',
178 'sentry.interfaces.Query'
179 ]
200 query = get_keys(key_name, json_body)
180 query = get_keys(key_name, json_body)
201 return query
181 return query
202
182
203
183
204 def parse_sentry_event(json_body):
184 def parse_sentry_event(json_body):
205 request_id = json_body.get("event_id")
185 request_id = json_body.get('event_id')
206
186
207 # required
187 # required
208 message = json_body.get("message")
188 message = json_body.get('message')
209 log_timestamp = json_body.get("timestamp")
189 log_timestamp = json_body.get('timestamp')
210 level = json_body.get("level")
190 level = json_body.get('level')
211 if isinstance(level, int):
191 if isinstance(level, int):
212 level = LogLevelPython.key_from_value(level)
192 level = LogLevelPython.key_from_value(level)
213
193
214 namespace = json_body.get("logger")
194 namespace = json_body.get('logger')
215 language = json_body.get("platform")
195 language = json_body.get('platform')
216
196
217 # optional
197 # optional
218 server_name = json_body.get("server_name")
198 server_name = json_body.get('server_name')
219 culprit = json_body.get("culprit")
199 culprit = json_body.get('culprit')
220 release = json_body.get("release")
200 release = json_body.get('release')
221
201
222 tags = json_body.get("tags", {})
202 tags = json_body.get('tags', {})
223 if hasattr(tags, "items"):
203 if hasattr(tags, 'items'):
224 tags = list(tags.items())
204 tags = list(tags.items())
225 extra = json_body.get("extra", {})
205 extra = json_body.get('extra', {})
226 if hasattr(extra, "items"):
206 if hasattr(extra, 'items'):
227 extra = list(extra.items())
207 extra = list(extra.items())
228
208
229 parsed_req = get_request(json_body)
209 parsed_req = get_request(json_body)
@@ -232,13 +212,12 b' def parse_sentry_event(json_body):'
232 query = get_query(json_body)
212 query = get_query(json_body)
233
213
234 # other unidentified keys found
214 # other unidentified keys found
235 other_keys = [
215 other_keys = [(k, json_body[k]) for k in json_body.keys()
236 (k, json_body[k]) for k in json_body.keys() if k not in EXCLUDE_SENTRY_KEYS
216 if k not in EXCLUDE_SENTRY_KEYS]
237 ]
238
217
239 logentry = get_logentry(json_body)
218 logentry = get_logentry(json_body)
240 if logentry:
219 if logentry:
241 message = logentry["message"]
220 message = logentry['message']
242
221
243 exception, stacktrace = get_exception(json_body)
222 exception, stacktrace = get_exception(json_body)
244
223
@@ -248,70 +227,70 b' def parse_sentry_event(json_body):'
248 event_type = ParsedSentryEventType.LOG
227 event_type = ParsedSentryEventType.LOG
249
228
250 event_dict = {
229 event_dict = {
251 "log_level": level,
230 'log_level': level,
252 "message": message,
231 'message': message,
253 "namespace": namespace,
232 'namespace': namespace,
254 "request_id": request_id,
233 'request_id': request_id,
255 "server": server_name,
234 'server': server_name,
256 "date": log_timestamp,
235 'date': log_timestamp,
257 "tags": tags,
236 'tags': tags
258 }
237 }
259 event_dict["tags"].extend(
238 event_dict['tags'].extend(
260 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS]
239 [(k, v) for k, v in extra if k not in EXCLUDED_LOG_VARS])
261 )
262
240
263 # other keys can be various object types
241 # other keys can be various object types
264 event_dict["tags"].extend([(k, v) for k, v in other_keys if isinstance(v, str)])
242 event_dict['tags'].extend([(k, v) for k, v in other_keys
243 if isinstance(v, str)])
265 if culprit:
244 if culprit:
266 event_dict["tags"].append(("sentry_culprit", culprit))
245 event_dict['tags'].append(('sentry_culprit', culprit))
267 if language:
246 if language:
268 event_dict["tags"].append(("sentry_language", language))
247 event_dict['tags'].append(('sentry_language', language))
269 if release:
248 if release:
270 event_dict["tags"].append(("sentry_release", release))
249 event_dict['tags'].append(('sentry_release', release))
271
250
272 if exception or stacktrace or alt_stacktrace or template:
251 if exception or stacktrace or alt_stacktrace or template:
273 event_type = ParsedSentryEventType.ERROR_REPORT
252 event_type = ParsedSentryEventType.ERROR_REPORT
274 event_dict = {
253 event_dict = {
275 "client": "sentry",
254 'client': 'sentry',
276 "error": message,
255 'error': message,
277 "namespace": namespace,
256 'namespace': namespace,
278 "request_id": request_id,
257 'request_id': request_id,
279 "server": server_name,
258 'server': server_name,
280 "start_time": log_timestamp,
259 'start_time': log_timestamp,
281 "end_time": None,
260 'end_time': None,
282 "tags": tags,
261 'tags': tags,
283 "extra": extra,
262 'extra': extra,
284 "language": language,
263 'language': language,
285 "view_name": json_body.get("culprit"),
264 'view_name': json_body.get('culprit'),
286 "http_status": None,
265 'http_status': None,
287 "username": None,
266 'username': None,
288 "url": parsed_req.get("url"),
267 'url': parsed_req.get('url'),
289 "ip": None,
268 'ip': None,
290 "user_agent": None,
269 'user_agent': None,
291 "request": None,
270 'request': None,
292 "slow_calls": None,
271 'slow_calls': None,
293 "request_stats": None,
272 'request_stats': None,
294 "traceback": None,
273 'traceback': None
295 }
274 }
296
275
297 event_dict["extra"].extend(other_keys)
276 event_dict['extra'].extend(other_keys)
298 if release:
277 if release:
299 event_dict["tags"].append(("sentry_release", release))
278 event_dict['tags'].append(('sentry_release', release))
300 event_dict["request"] = parsed_req
279 event_dict['request'] = parsed_req
301 if "headers" in parsed_req:
280 if 'headers' in parsed_req:
302 event_dict["user_agent"] = parsed_req["headers"].get("User-Agent")
281 event_dict['user_agent'] = parsed_req['headers'].get('User-Agent')
303 if "env" in parsed_req:
282 if 'env' in parsed_req:
304 event_dict["ip"] = parsed_req["env"].get("REMOTE_ADDR")
283 event_dict['ip'] = parsed_req['env'].get('REMOTE_ADDR')
305 ts_ms = int(json_body.get("time_spent") or 0)
284 ts_ms = int(json_body.get('time_spent') or 0)
306 if ts_ms > 0:
285 if ts_ms > 0:
307 event_dict["end_time"] = event_dict["start_time"] + timedelta(
286 event_dict['end_time'] = event_dict['start_time'] + \
308 milliseconds=ts_ms
287 timedelta(milliseconds=ts_ms)
309 )
310 if stacktrace or alt_stacktrace or template:
288 if stacktrace or alt_stacktrace or template:
311 event_dict["traceback"] = stacktrace or alt_stacktrace or template
289 event_dict['traceback'] = stacktrace or alt_stacktrace or template
312 for k in list(event_dict.keys()):
290 for k in list(event_dict.keys()):
313 if event_dict[k] is None:
291 if event_dict[k] is None:
314 del event_dict[k]
292 del event_dict[k]
315 if user:
293 if user:
316 event_dict["username"] = user["username"] or user["id"] or user["email"]
294 event_dict['username'] = user['username'] or user['id'] \
295 or user['email']
317 return event_dict, event_type
296 return event_dict, event_type
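Note: in both versions above, Sentry's time_spent field is treated as milliseconds when deriving end_time, and keys left as None are stripped from the dict before it is returned. A minimal sketch of the duration conversion (values hypothetical):

    from datetime import datetime, timedelta

    start_time = datetime(2018, 10, 13, 19, 27, 30)
    ts_ms = 1500  # Sentry time_spent, in milliseconds
    end_time = start_time + timedelta(milliseconds=ts_ms)  # 19:27:31.500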
@@ -13,3 +13,5 @@
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
17
@@ -41,7 +41,7 @@ target_metadata = MetaData(naming_convention=NAMING_CONVENTION)
41 # my_important_option = config.get_main_option("my_important_option")
41 # my_important_option = config.get_main_option("my_important_option")
42 # ... etc.
42 # ... etc.
43
43
44 VERSION_TABLE_NAME = "alembic_appenlight_version"
44 VERSION_TABLE_NAME = 'alembic_appenlight_version'
45
45
46
46
47 def run_migrations_offline():
47 def run_migrations_offline():
@@ -57,12 +57,9 @@ def run_migrations_offline():
57
57
58 """
58 """
59 url = config.get_main_option("sqlalchemy.url")
59 url = config.get_main_option("sqlalchemy.url")
60 context.configure(
60 context.configure(url=url, target_metadata=target_metadata,
61 url=url,
61 transaction_per_migration=True,
62 target_metadata=target_metadata,
62 version_table=VERSION_TABLE_NAME)
63 transaction_per_migration=True,
64 version_table=VERSION_TABLE_NAME,
65 )
66
63
67 with context.begin_transaction():
64 with context.begin_transaction():
68 context.run_migrations()
65 context.run_migrations()
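Note: run_migrations_offline() configures the context from the sqlalchemy.url string alone because offline mode renders SQL rather than executing it; transaction_per_migration=True wraps each revision in its own transaction, and the custom version_table keeps AppEnlight's migration bookkeeping in alembic_appenlight_version instead of the default alembic_version. Usage sketch (assuming a standard alembic.ini):

    # Offline mode is what "alembic upgrade --sql" exercises; it prints
    # the DDL instead of connecting to the database:
    #   alembic -c alembic.ini upgrade head --sql > upgrade.sql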
@@ -77,16 +74,15 @@ def run_migrations_online():
77 """
74 """
78 engine = engine_from_config(
75 engine = engine_from_config(
79 config.get_section(config.config_ini_section),
76 config.get_section(config.config_ini_section),
80 prefix="sqlalchemy.",
77 prefix='sqlalchemy.',
81 poolclass=pool.NullPool,
78 poolclass=pool.NullPool)
82 )
83
79
84 connection = engine.connect()
80 connection = engine.connect()
85 context.configure(
81 context.configure(
86 connection=connection,
82 connection=connection,
87 target_metadata=target_metadata,
83 target_metadata=target_metadata,
88 transaction_per_migration=True,
84 transaction_per_migration=True,
89 version_table=VERSION_TABLE_NAME,
85 version_table=VERSION_TABLE_NAME
90 )
86 )
91
87
92 try:
88 try:
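Note: the online path differs only in that it binds a real connection, with poolclass=pool.NullPool so the migration engine holds no persistent connection pool. A condensed sketch of the pattern (cleanup elided here; the file continues with a try block that presumably closes the connection):

    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.", poolclass=pool.NullPool)
    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      transaction_per_migration=True,
                      version_table=VERSION_TABLE_NAME)
    with context.begin_transaction():
        context.run_migrations()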
This diff has been collapsed as it changes many lines (789 lines changed).
@@ -23,7 +23,7 @@ Create Date: 2014-10-13 23:47:38.295159
23 """
23 """
24
24
25 # revision identifiers, used by Alembic.
25 # revision identifiers, used by Alembic.
26 revision = "55b6e612672f"
26 revision = '55b6e612672f'
27 down_revision = None
27 down_revision = None
28
28
29 from alembic import op
29 from alembic import op
@@ -31,514 +31,348 @@ import sqlalchemy as sa
31
31
32
32
33 def upgrade():
33 def upgrade():
34 op.add_column("users", sa.Column("first_name", sa.Unicode(25)))
34 op.add_column('users', sa.Column('first_name', sa.Unicode(25)))
35 op.add_column("users", sa.Column("last_name", sa.Unicode(50)))
35 op.add_column('users', sa.Column('last_name', sa.Unicode(50)))
36 op.add_column("users", sa.Column("company_name", sa.Unicode(255)))
36 op.add_column('users', sa.Column('company_name', sa.Unicode(255)))
37 op.add_column("users", sa.Column("company_address", sa.Unicode(255)))
37 op.add_column('users', sa.Column('company_address', sa.Unicode(255)))
38 op.add_column("users", sa.Column("phone1", sa.Unicode(25)))
38 op.add_column('users', sa.Column('phone1', sa.Unicode(25)))
39 op.add_column("users", sa.Column("phone2", sa.Unicode(25)))
39 op.add_column('users', sa.Column('phone2', sa.Unicode(25)))
40 op.add_column("users", sa.Column("zip_code", sa.Unicode(25)))
40 op.add_column('users', sa.Column('zip_code', sa.Unicode(25)))
41 op.add_column(
41 op.add_column('users', sa.Column('default_report_sort', sa.Unicode(20), nullable=False, server_default="newest"))
42 "users",
42 op.add_column('users', sa.Column('city', sa.Unicode(128)))
43 sa.Column(
43 op.add_column('users', sa.Column('notes', sa.UnicodeText, server_default=''))
44 "default_report_sort",
44 op.add_column('users', sa.Column('notifications', sa.Boolean(), nullable=False, server_default='true'))
45 sa.Unicode(20),
45 op.add_column('users', sa.Column('registration_ip', sa.Unicode(40), nullable=False, server_default=''))
46 nullable=False,
47 server_default="newest",
48 ),
49 )
50 op.add_column("users", sa.Column("city", sa.Unicode(128)))
51 op.add_column("users", sa.Column("notes", sa.UnicodeText, server_default=""))
52 op.add_column(
53 "users",
54 sa.Column("notifications", sa.Boolean(), nullable=False, server_default="true"),
55 )
56 op.add_column(
57 "users",
58 sa.Column("registration_ip", sa.Unicode(40), nullable=False, server_default=""),
59 )
60
46
61 op.create_table(
47 op.create_table(
62 "integrations",
48 'integrations',
63 sa.Column("id", sa.Integer(), primary_key=True),
49 sa.Column('id', sa.Integer(), primary_key=True),
64 sa.Column(
50 sa.Column('resource_id', sa.Integer(),
65 "resource_id",
51 sa.ForeignKey('resources.resource_id', onupdate='cascade',
66 sa.Integer(),
52 ondelete='cascade')),
67 sa.ForeignKey(
53 sa.Column('integration_name', sa.Unicode(64)),
68 "resources.resource_id", onupdate="cascade", ondelete="cascade"
54 sa.Column('config', sa.dialects.postgresql.JSON, nullable=False),
69 ),
55 sa.Column('modified_date', sa.DateTime(), nullable=False, server_default=sa.func.now()),
70 ),
56 sa.Column('external_id', sa.Unicode(255)),
71 sa.Column("integration_name", sa.Unicode(64)),
57 sa.Column('external_id2', sa.Unicode(255))
72 sa.Column("config", sa.dialects.postgresql.JSON, nullable=False),
73 sa.Column(
74 "modified_date", sa.DateTime(), nullable=False, server_default=sa.func.now()
75 ),
76 sa.Column("external_id", sa.Unicode(255)),
77 sa.Column("external_id2", sa.Unicode(255)),
78 )
58 )
79
59
80 op.create_table(
60 op.create_table(
81 "alert_channels",
61 'alert_channels',
82 sa.Column(
62 sa.Column('owner_id', sa.Integer(),
83 "owner_id",
63 sa.ForeignKey('users.id', onupdate='cascade',
84 sa.Integer(),
64 ondelete='cascade'), nullable=False),
85 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
65 sa.Column('channel_name', sa.Unicode(25), nullable=False),
86 nullable=False,
66 sa.Column('channel_value', sa.Unicode(80), nullable=False),
87 ),
67 sa.Column('channel_json_conf', sa.dialects.postgresql.JSON, nullable=False),
88 sa.Column("channel_name", sa.Unicode(25), nullable=False),
68 sa.Column('channel_validated', sa.Boolean, nullable=False, server_default='False'),
89 sa.Column("channel_value", sa.Unicode(80), nullable=False),
69 sa.Column('send_alerts', sa.Boolean, nullable=False, server_default='True'),
90 sa.Column("channel_json_conf", sa.dialects.postgresql.JSON, nullable=False),
70 sa.Column('notify_only_first', sa.Boolean, nullable=False, server_default='False'),
91 sa.Column(
71 sa.Column('daily_digest', sa.Boolean, nullable=False, server_default='True'),
92 "channel_validated", sa.Boolean, nullable=False, server_default="False"
72 sa.Column('pkey', sa.Integer(), primary_key=True),
93 ),
73 sa.Column('integration_id', sa.Integer,
94 sa.Column("send_alerts", sa.Boolean, nullable=False, server_default="True"),
74 sa.ForeignKey('integrations.id', onupdate='cascade',
95 sa.Column(
75 ondelete='cascade')),
96 "notify_only_first", sa.Boolean, nullable=False, server_default="False"
76 )
97 ),
77 op.create_unique_constraint('uq_alert_channels', 'alert_channels',
98 sa.Column("daily_digest", sa.Boolean, nullable=False, server_default="True"),
78 ["owner_id", "channel_name", "channel_value"])
99 sa.Column("pkey", sa.Integer(), primary_key=True),
100 sa.Column(
101 "integration_id",
102 sa.Integer,
103 sa.ForeignKey("integrations.id", onupdate="cascade", ondelete="cascade"),
104 ),
105 )
106 op.create_unique_constraint(
107 "uq_alert_channels",
108 "alert_channels",
109 ["owner_id", "channel_name", "channel_value"],
110 )
111
79
112 op.create_table(
80 op.create_table(
113 "alert_channels_actions",
81 'alert_channels_actions',
114 sa.Column("owner_id", sa.Integer(), nullable=False),
82 sa.Column('owner_id', sa.Integer(), nullable=False),
115 sa.Column(
83 sa.Column('resource_id', sa.Integer(),
116 "resource_id",
84 sa.ForeignKey('resources.resource_id', onupdate='cascade',
117 sa.Integer(),
85 ondelete='cascade')),
118 sa.ForeignKey(
86 sa.Column('pkey', sa.Integer(), primary_key=True),
119 "resources.resource_id", onupdate="cascade", ondelete="cascade"
87 sa.Column('action', sa.Unicode(10), nullable=False, server_default='always'),
120 ),
88 sa.Column('rule', sa.dialects.postgresql.JSON),
121 ),
89 sa.Column('type', sa.Unicode(10), index=True),
122 sa.Column("pkey", sa.Integer(), primary_key=True),
90 sa.Column('other_id', sa.Unicode(40), index=True),
123 sa.Column("action", sa.Unicode(10), nullable=False, server_default="always"),
91 sa.Column('config', sa.dialects.postgresql.JSON),
124 sa.Column("rule", sa.dialects.postgresql.JSON),
92 sa.Column('name', sa.Unicode(255), server_default='')
125 sa.Column("type", sa.Unicode(10), index=True),
126 sa.Column("other_id", sa.Unicode(40), index=True),
127 sa.Column("config", sa.dialects.postgresql.JSON),
128 sa.Column("name", sa.Unicode(255), server_default=""),
129 )
93 )
130
94
95
131 op.create_table(
96 op.create_table(
132 "application_postprocess_conf",
97 'application_postprocess_conf',
133 sa.Column("pkey", sa.Integer(), primary_key=True),
98 sa.Column('pkey', sa.Integer(), primary_key=True),
134 sa.Column("do", sa.Unicode(25), nullable=False),
99 sa.Column('do', sa.Unicode(25), nullable=False),
135 sa.Column("new_value", sa.UnicodeText(), nullable=False, server_default=""),
100 sa.Column('new_value', sa.UnicodeText(), nullable=False, server_default=''),
136 sa.Column(
101 sa.Column('resource_id', sa.Integer(),
137 "resource_id",
102 sa.ForeignKey('resources.resource_id',
138 sa.Integer(),
103 onupdate='cascade',
139 sa.ForeignKey(
104 ondelete='cascade'), nullable=False),
140 "resources.resource_id", onupdate="cascade", ondelete="cascade"
105 sa.Column('rule', sa.dialects.postgresql.JSON),
141 ),
142 nullable=False,
143 ),
144 sa.Column("rule", sa.dialects.postgresql.JSON),
145 )
106 )
146
107
147 op.create_table(
108 op.create_table(
148 "applications",
109 'applications',
149 sa.Column(
110 sa.Column('resource_id', sa.Integer(),
150 "resource_id",
111 sa.ForeignKey('resources.resource_id', onupdate='cascade',
151 sa.Integer(),
112 ondelete='cascade'), nullable=False,
152 sa.ForeignKey(
113 primary_key=True, autoincrement=False),
153 "resources.resource_id", onupdate="cascade", ondelete="cascade"
114 sa.Column('domains', sa.UnicodeText, nullable=False),
154 ),
115 sa.Column('api_key', sa.Unicode(32), nullable=False, index=True),
155 nullable=False,
116 sa.Column('default_grouping', sa.Unicode(20), nullable=False, server_default='url_type'),
156 primary_key=True,
117 sa.Column('public_key', sa.Unicode(32), nullable=False, index=True),
157 autoincrement=False,
118 sa.Column('error_report_threshold', sa.Integer(), server_default='10', nullable=False),
158 ),
119 sa.Column('slow_report_threshold', sa.Integer(), server_default='10', nullable=False),
159 sa.Column("domains", sa.UnicodeText, nullable=False),
120 sa.Column('apdex_threshold', sa.Float(), server_default='0.7', nullable=False),
160 sa.Column("api_key", sa.Unicode(32), nullable=False, index=True),
121 sa.Column('allow_permanent_storage', sa.Boolean(), server_default="false", nullable=False),
161 sa.Column(
122 )
162 "default_grouping",
123 op.create_unique_constraint(None, 'applications',
163 sa.Unicode(20),
124 ["public_key"])
164 nullable=False,
125 op.create_unique_constraint(None, 'applications',
165 server_default="url_type",
126 ["api_key"])
166 ),
167 sa.Column("public_key", sa.Unicode(32), nullable=False, index=True),
168 sa.Column(
169 "error_report_threshold", sa.Integer(), server_default="10", nullable=False
170 ),
171 sa.Column(
172 "slow_report_threshold", sa.Integer(), server_default="10", nullable=False
173 ),
174 sa.Column("apdex_threshold", sa.Float(), server_default="0.7", nullable=False),
175 sa.Column(
176 "allow_permanent_storage",
177 sa.Boolean(),
178 server_default="false",
179 nullable=False,
180 ),
181 )
182 op.create_unique_constraint(None, "applications", ["public_key"])
183 op.create_unique_constraint(None, "applications", ["api_key"])
184
127
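Note: passing None as the constraint name lets Alembic fall back on the metadata naming convention (the NAMING_CONVENTION dict appears later in this changeset), so the two constraints above get deterministic names. Explicit equivalent (names derived from the uq_%(table_name)s_%(column_0_name)s template):

    op.create_unique_constraint("uq_applications_public_key",
                                "applications", ["public_key"])
    op.create_unique_constraint("uq_applications_api_key",
                                "applications", ["api_key"])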
185 op.create_table(
128 op.create_table(
186 "metrics",
129 'metrics',
187 sa.Column("pkey", sa.types.BigInteger, nullable=False, primary_key=True),
130 sa.Column('pkey', sa.types.BigInteger, nullable=False, primary_key=True),
188 sa.Column(
131 sa.Column('resource_id', sa.Integer(),
189 "resource_id",
132 sa.ForeignKey('resources.resource_id',
190 sa.Integer(),
133 onupdate='cascade',
191 sa.ForeignKey(
134 ondelete='cascade')),
192 "resources.resource_id", onupdate="cascade", ondelete="cascade"
135 sa.Column('timestamp', sa.DateTime),
193 ),
136 sa.Column('namespace', sa.Unicode(255)),
194 ),
137 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}")
195 sa.Column("timestamp", sa.DateTime),
196 sa.Column("namespace", sa.Unicode(255)),
197 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
198 )
138 )
199
139
200 op.create_table(
140 op.create_table(
201 "events",
141 'events',
202 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
142 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
203 sa.Column("start_date", sa.DateTime, nullable=False, index=True),
143 sa.Column('start_date', sa.DateTime, nullable=False, index=True),
204 sa.Column("end_date", sa.DateTime),
144 sa.Column('end_date', sa.DateTime),
205 sa.Column("status", sa.Integer(), nullable=False, index=True),
145 sa.Column('status', sa.Integer(), nullable=False, index=True),
206 sa.Column("event_type", sa.Integer(), nullable=False, index=True),
146 sa.Column('event_type', sa.Integer(), nullable=False, index=True),
207 sa.Column("origin_user_id", sa.Integer()),
147 sa.Column('origin_user_id', sa.Integer()),
208 sa.Column("target_user_id", sa.Integer()),
148 sa.Column('target_user_id', sa.Integer()),
209 sa.Column("resource_id", sa.Integer(), index=True),
149 sa.Column('resource_id', sa.Integer(), index=True),
210 sa.Column("text", sa.UnicodeText, server_default=""),
150 sa.Column('text', sa.UnicodeText, server_default=''),
211 sa.Column("values", sa.dialects.postgresql.JSON),
151 sa.Column('values', sa.dialects.postgresql.JSON),
212 sa.Column("target_id", sa.Integer()),
152 sa.Column('target_id', sa.Integer()),
213 sa.Column("target_uuid", sa.Unicode(40), index=True),
153 sa.Column('target_uuid', sa.Unicode(40), index=True)
214 )
154 )
215
155
216 op.create_table(
156 op.create_table(
217 "logs",
157 'logs',
218 sa.Column("log_id", sa.types.BigInteger, nullable=False, primary_key=True),
158 sa.Column('log_id', sa.types.BigInteger, nullable=False, primary_key=True),
219 sa.Column(
159 sa.Column('resource_id', sa.Integer(),
220 "resource_id",
160 sa.ForeignKey('resources.resource_id',
221 sa.Integer(),
161 onupdate='cascade',
222 sa.ForeignKey(
162 ondelete='cascade')),
223 "resources.resource_id", onupdate="cascade", ondelete="cascade"
163 sa.Column('log_level', sa.SmallInteger(), nullable=False),
224 ),
164 sa.Column('primary_key', sa.Unicode(128), nullable=True),
225 ),
165 sa.Column('message', sa.UnicodeText, nullable=False, server_default=''),
226 sa.Column("log_level", sa.SmallInteger(), nullable=False),
166 sa.Column('timestamp', sa.DateTime),
227 sa.Column("primary_key", sa.Unicode(128), nullable=True),
167 sa.Column('namespace', sa.Unicode(255)),
228 sa.Column("message", sa.UnicodeText, nullable=False, server_default=""),
168 sa.Column('request_id', sa.Unicode(40)),
229 sa.Column("timestamp", sa.DateTime),
169 sa.Column('tags', sa.dialects.postgresql.JSON, server_default="{}"),
230 sa.Column("namespace", sa.Unicode(255)),
170 sa.Column('permanent', sa.Boolean(), server_default="false",
231 sa.Column("request_id", sa.Unicode(40)),
171 nullable=False)
232 sa.Column("tags", sa.dialects.postgresql.JSON, server_default="{}"),
233 sa.Column("permanent", sa.Boolean(), server_default="false", nullable=False),
234 )
172 )
235
173
236 op.create_table(
174 op.create_table(
237 "reports_groups",
175 'reports_groups',
238 sa.Column("id", sa.types.BigInteger, primary_key=True),
176 sa.Column('id', sa.types.BigInteger, primary_key=True),
239 sa.Column(
177 sa.Column('resource_id', sa.Integer,
240 "resource_id",
178 sa.ForeignKey('resources.resource_id', onupdate='cascade',
241 sa.Integer,
179 ondelete='cascade'), nullable=False),
242 sa.ForeignKey(
180 sa.Column('priority', sa.Integer, nullable=False, server_default="5"),
243 "resources.resource_id", onupdate="cascade", ondelete="cascade"
181 sa.Column('first_timestamp', sa.DateTime(), nullable=False, server_default=sa.func.now()),
244 ),
182 sa.Column('last_timestamp', sa.DateTime()),
245 nullable=False,
183 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
246 ),
184 sa.Column('grouping_hash', sa.Unicode(40), nullable=False, server_default=""),
247 sa.Column("priority", sa.Integer, nullable=False, server_default="5"),
185 sa.Column('triggered_postprocesses_ids', sa.dialects.postgresql.JSON, nullable=False, server_default="[]"),
248 sa.Column(
186 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
249 "first_timestamp",
187 sa.Column('total_reports', sa.Integer, nullable=False, server_default="0"),
250 sa.DateTime(),
188 sa.Column('last_report', sa.Integer, nullable=False, server_default="0"),
251 nullable=False,
189 sa.Column('occurences', sa.Integer, nullable=False, server_default="1"),
252 server_default=sa.func.now(),
190 sa.Column('average_duration', sa.Float(), nullable=False, server_default="0"),
253 ),
191 sa.Column('summed_duration', sa.Float(), nullable=False, server_default="0"),
254 sa.Column("last_timestamp", sa.DateTime()),
192 sa.Column('notified', sa.Boolean, nullable=False, server_default="False"),
255 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
193 sa.Column('fixed', sa.Boolean, nullable=False, server_default="False"),
256 sa.Column("grouping_hash", sa.Unicode(40), nullable=False, server_default=""),
194 sa.Column('public', sa.Boolean, nullable=False, server_default="False"),
257 sa.Column(
195 sa.Column('read', sa.Boolean, nullable=False, server_default="False"),
258 "triggered_postprocesses_ids",
259 sa.dialects.postgresql.JSON,
260 nullable=False,
261 server_default="[]",
262 ),
263 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
264 sa.Column("total_reports", sa.Integer, nullable=False, server_default="0"),
265 sa.Column("last_report", sa.Integer, nullable=False, server_default="0"),
266 sa.Column("occurences", sa.Integer, nullable=False, server_default="1"),
267 sa.Column("average_duration", sa.Float(), nullable=False, server_default="0"),
268 sa.Column("summed_duration", sa.Float(), nullable=False, server_default="0"),
269 sa.Column("notified", sa.Boolean, nullable=False, server_default="False"),
270 sa.Column("fixed", sa.Boolean, nullable=False, server_default="False"),
271 sa.Column("public", sa.Boolean, nullable=False, server_default="False"),
272 sa.Column("read", sa.Boolean, nullable=False, server_default="False"),
273 )
196 )
274
197
275 op.create_table(
198 op.create_table(
276 "reports",
199 'reports',
277 sa.Column("id", sa.types.BigInteger, primary_key=True),
200 sa.Column('id', sa.types.BigInteger, primary_key=True),
278 sa.Column(
201 sa.Column('group_id', sa.types.BigInteger,
279 "group_id",
202 sa.ForeignKey('reports_groups.id', onupdate='cascade',
280 sa.types.BigInteger,
203 ondelete='cascade'), nullable=False, index=True),
281 sa.ForeignKey("reports_groups.id", onupdate="cascade", ondelete="cascade"),
204 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
282 nullable=False,
205 sa.Column('report_type', sa.Integer, nullable=False, server_default="0"),
283 index=True,
206 sa.Column('error', sa.UnicodeText, nullable=False, server_default=""),
284 ),
207 sa.Column('extra', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
285 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
208 sa.Column('request', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
286 sa.Column("report_type", sa.Integer, nullable=False, server_default="0"),
209 sa.Column('tags', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
287 sa.Column("error", sa.UnicodeText, nullable=False, server_default=""),
210 sa.Column('ip', sa.Unicode(39), nullable=False, server_default=""),
288 sa.Column(
211 sa.Column('username', sa.Unicode(255), nullable=False, server_default=""),
289 "extra", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
212 sa.Column('user_agent', sa.Unicode(512), nullable=False, server_default=""),
290 ),
213 sa.Column('url', sa.UnicodeText, nullable=False, server_default=""),
291 sa.Column(
214 sa.Column('request_id', sa.Unicode(40), nullable=False, server_default=""),
292 "request", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
215 sa.Column('request_stats', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
293 ),
216 sa.Column('traceback', sa.dialects.postgresql.JSON, nullable=False, server_default="{}"),
294 sa.Column(
217 sa.Column('traceback_hash', sa.Unicode(40), nullable=False, server_default=""),
295 "tags", sa.dialects.postgresql.JSON, nullable=False, server_default="{}"
218 sa.Column('start_time', sa.DateTime(), nullable=False, server_default=sa.func.now()),
296 ),
219 sa.Column('end_time', sa.DateTime()),
297 sa.Column("ip", sa.Unicode(39), nullable=False, server_default=""),
220 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
298 sa.Column("username", sa.Unicode(255), nullable=False, server_default=""),
221 sa.Column('duration', sa.Float(), nullable=False, server_default="0"),
299 sa.Column("user_agent", sa.Unicode(512), nullable=False, server_default=""),
222 sa.Column('http_status', sa.Integer, index=True),
300 sa.Column("url", sa.UnicodeText, nullable=False, server_default=""),
223 sa.Column('url_domain', sa.Unicode(128)),
301 sa.Column("request_id", sa.Unicode(40), nullable=False, server_default=""),
224 sa.Column('url_path', sa.UnicodeText),
302 sa.Column(
225 sa.Column('language', sa.Integer, server_default="0"),
303 "request_stats",
226 )
304 sa.dialects.postgresql.JSON,
227 op.create_index(None, 'reports',
305 nullable=False,
228 [sa.text("(tags ->> 'server_name')")])
306 server_default="{}",
229 op.create_index(None, 'reports',
307 ),
230 [sa.text("(tags ->> 'view_name')")])
308 sa.Column(
309 "traceback",
310 sa.dialects.postgresql.JSON,
311 nullable=False,
312 server_default="{}",
313 ),
314 sa.Column("traceback_hash", sa.Unicode(40), nullable=False, server_default=""),
315 sa.Column(
316 "start_time", sa.DateTime(), nullable=False, server_default=sa.func.now()
317 ),
318 sa.Column("end_time", sa.DateTime()),
319 sa.Column(
320 "report_group_time",
321 sa.DateTime,
322 index=True,
323 nullable=False,
324 server_default=sa.func.now(),
325 ),
326 sa.Column("duration", sa.Float(), nullable=False, server_default="0"),
327 sa.Column("http_status", sa.Integer, index=True),
328 sa.Column("url_domain", sa.Unicode(128)),
329 sa.Column("url_path", sa.UnicodeText),
330 sa.Column("language", sa.Integer, server_default="0"),
331 )
332 op.create_index(None, "reports", [sa.text("(tags ->> 'server_name')")])
333 op.create_index(None, "reports", [sa.text("(tags ->> 'view_name')")])
334
231
335 op.create_table(
232 op.create_table(
336 "reports_assignments",
233 'reports_assignments',
337 sa.Column("group_id", sa.types.BigInteger, nullable=False, primary_key=True),
234 sa.Column('group_id', sa.types.BigInteger, nullable=False, primary_key=True),
338 sa.Column(
235 sa.Column('owner_id', sa.Integer,
339 "owner_id",
236 sa.ForeignKey('users.id', onupdate='cascade',ondelete='cascade'),
340 sa.Integer,
237 nullable=False, primary_key=True),
341 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
238 sa.Column('report_time', sa.DateTime, nullable=False)
342 nullable=False,
239 )
343 primary_key=True,
344 ),
345 sa.Column("report_time", sa.DateTime, nullable=False),
346 )
347
240
348 op.create_table(
241 op.create_table(
349 "reports_comments",
242 'reports_comments',
350 sa.Column("comment_id", sa.Integer, primary_key=True),
243 sa.Column('comment_id', sa.Integer, primary_key=True),
351 sa.Column("body", sa.UnicodeText, nullable=False, server_default=""),
244 sa.Column('body', sa.UnicodeText, nullable=False, server_default=''),
352 sa.Column(
245 sa.Column('owner_id', sa.Integer,
353 "owner_id",
246 sa.ForeignKey('users.id', onupdate='cascade',
354 sa.Integer,
247 ondelete='set null'), nullable=True),
355 sa.ForeignKey("users.id", onupdate="cascade", ondelete="set null"),
248 sa.Column('created_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
356 nullable=True,
249 sa.Column('report_time', sa.DateTime, nullable=False),
357 ),
250 sa.Column('group_id', sa.types.BigInteger, nullable=False)
358 sa.Column(
359 "created_timestamp",
360 sa.DateTime,
361 nullable=False,
362 server_default=sa.func.now(),
363 ),
364 sa.Column("report_time", sa.DateTime, nullable=False),
365 sa.Column("group_id", sa.types.BigInteger, nullable=False),
366 )
251 )
367
252
368 op.create_table(
253 op.create_table(
369 "reports_stats",
254 'reports_stats',
370 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
255 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
371 sa.Column("start_interval", sa.DateTime, nullable=False, index=True),
256 sa.Column('start_interval', sa.DateTime, nullable=False, index=True),
372 sa.Column("group_id", sa.types.BigInteger, index=True),
257 sa.Column('group_id', sa.types.BigInteger, index=True),
373 sa.Column(
258 sa.Column('occurences', sa.Integer, nullable=False, server_default='0', index=True),
374 "occurences", sa.Integer, nullable=False, server_default="0", index=True
259 sa.Column('owner_user_id', sa.Integer),
375 ),
260 sa.Column('type', sa.Integer, index=True, nullable=False),
376 sa.Column("owner_user_id", sa.Integer),
261 sa.Column('duration', sa.Float(), server_default='0'),
377 sa.Column("type", sa.Integer, index=True, nullable=False),
262 sa.Column('server_name', sa.Unicode(128),
378 sa.Column("duration", sa.Float(), server_default="0"),
263 server_default=''),
379 sa.Column("server_name", sa.Unicode(128), server_default=""),
264 sa.Column('view_name', sa.Unicode(128),
380 sa.Column("view_name", sa.Unicode(128), server_default=""),
265 server_default=''),
381 sa.Column("id", sa.BigInteger(), nullable=False, primary_key=True),
266 sa.Column('id', sa.BigInteger(), nullable=False, primary_key=True),
382 )
267 )
383 op.create_index(
268 op.create_index('ix_reports_stats_start_interval_group_id', 'reports_stats',
384 "ix_reports_stats_start_interval_group_id",
269 ["start_interval", "group_id"])
385 "reports_stats",
386 ["start_interval", "group_id"],
387 )
388
270
389 op.create_table(
271 op.create_table(
390 "slow_calls",
272 'slow_calls',
391 sa.Column("id", sa.types.BigInteger, primary_key=True),
273 sa.Column('id', sa.types.BigInteger, primary_key=True),
392 sa.Column(
274 sa.Column('report_id', sa.types.BigInteger, sa.ForeignKey('reports.id', onupdate='cascade', ondelete='cascade'),
393 "report_id",
275 nullable=False, index=True),
394 sa.types.BigInteger,
276 sa.Column('duration', sa.Float(), nullable=False, server_default="0", index=True),
395 sa.ForeignKey("reports.id", onupdate="cascade", ondelete="cascade"),
277 sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now(), index=True),
396 nullable=False,
278 sa.Column('report_group_time', sa.DateTime, index=True, nullable=False, server_default=sa.func.now()),
397 index=True,
279 sa.Column('type', sa.Unicode(16), nullable=False, index=True),
398 ),
280 sa.Column('statement', sa.UnicodeText, nullable=False, server_default=''),
399 sa.Column(
281 sa.Column('parameters', sa.dialects.postgresql.JSON, nullable=False),
400 "duration", sa.Float(), nullable=False, server_default="0", index=True
282 sa.Column('location', sa.UnicodeText, server_default=''),
401 ),
283 sa.Column('subtype', sa.Unicode(16), nullable=False, index=True),
402 sa.Column(
284 sa.Column('resource_id', sa.Integer, nullable=False, index=True),
403 "timestamp",
285 sa.Column('statement_hash', sa.Unicode(60), index=True)
404 sa.DateTime,
405 nullable=False,
406 server_default=sa.func.now(),
407 index=True,
408 ),
409 sa.Column(
410 "report_group_time",
411 sa.DateTime,
412 index=True,
413 nullable=False,
414 server_default=sa.func.now(),
415 ),
416 sa.Column("type", sa.Unicode(16), nullable=False, index=True),
417 sa.Column("statement", sa.UnicodeText, nullable=False, server_default=""),
418 sa.Column("parameters", sa.dialects.postgresql.JSON, nullable=False),
419 sa.Column("location", sa.UnicodeText, server_default=""),
420 sa.Column("subtype", sa.Unicode(16), nullable=False, index=True),
421 sa.Column("resource_id", sa.Integer, nullable=False, index=True),
422 sa.Column("statement_hash", sa.Unicode(60), index=True),
423 )
286 )
424
287
425 op.create_table(
288 op.create_table(
426 "tags",
289 'tags',
427 sa.Column("id", sa.types.BigInteger, primary_key=True),
290 sa.Column('id', sa.types.BigInteger, primary_key=True),
428 sa.Column(
291 sa.Column('resource_id', sa.Integer,
429 "resource_id",
292 sa.ForeignKey('resources.resource_id', onupdate='cascade',
430 sa.Integer,
293 ondelete='cascade')),
431 sa.ForeignKey(
294 sa.Column('first_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
432 "resources.resource_id", onupdate="cascade", ondelete="cascade"
295 sa.Column('last_timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
433 ),
296 sa.Column('name', sa.Unicode(32), nullable=False),
434 ),
297 sa.Column('value', sa.dialects.postgresql.JSON, nullable=False),
435 sa.Column(
298 sa.Column('times_seen', sa.Integer, nullable=False, server_default='1')
436 "first_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
437 ),
438 sa.Column(
439 "last_timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()
440 ),
441 sa.Column("name", sa.Unicode(32), nullable=False),
442 sa.Column("value", sa.dialects.postgresql.JSON, nullable=False),
443 sa.Column("times_seen", sa.Integer, nullable=False, server_default="1"),
444 )
299 )
445
300
446 op.create_table(
301 op.create_table(
447 "auth_tokens",
302 'auth_tokens',
448 sa.Column("id", sa.Integer, nullable=False, primary_key=True),
303 sa.Column('id', sa.Integer, nullable=False, primary_key=True),
449 sa.Column("token", sa.Unicode),
304 sa.Column('token', sa.Unicode),
450 sa.Column(
305 sa.Column('creation_date', sa.DateTime, nullable=False, server_default=sa.func.now()),
451 "creation_date", sa.DateTime, nullable=False, server_default=sa.func.now()
306 sa.Column('expires', sa.DateTime),
452 ),
307 sa.Column('owner_id', sa.Integer,
453 sa.Column("expires", sa.DateTime),
308 sa.ForeignKey('users.id', onupdate='cascade',
454 sa.Column(
309 ondelete='cascade')),
455 "owner_id",
310 sa.Column('description', sa.Unicode),
456 sa.Integer,
457 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
458 ),
459 sa.Column("description", sa.Unicode),
460 )
311 )
461
312
462 op.create_table(
313 op.create_table(
463 "channels_actions",
314 'channels_actions',
464 sa.Column(
315 sa.Column('channel_pkey', sa.Integer,
465 "channel_pkey",
316 sa.ForeignKey('alert_channels.pkey',
466 sa.Integer,
317 ondelete='CASCADE', onupdate='CASCADE')),
467 sa.ForeignKey(
318 sa.Column('action_pkey', sa.Integer,
468 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
319 sa.ForeignKey('alert_channels_actions.pkey',
469 ),
320 ondelete='CASCADE', onupdate='CASCADE'))
470 ),
471 sa.Column(
472 "action_pkey",
473 sa.Integer,
474 sa.ForeignKey(
475 "alert_channels_actions.pkey", ondelete="CASCADE", onupdate="CASCADE"
476 ),
477 ),
478 )
321 )
479
322
480 op.create_table(
323 op.create_table(
481 "config",
324 'config',
482 sa.Column("key", sa.Unicode(128), primary_key=True),
325 sa.Column('key', sa.Unicode(128), primary_key=True),
483 sa.Column("section", sa.Unicode(128), primary_key=True),
326 sa.Column('section', sa.Unicode(128), primary_key=True),
484 sa.Column("value", sa.dialects.postgresql.JSON, server_default="{}"),
327 sa.Column('value', sa.dialects.postgresql.JSON,
328 server_default="{}")
485 )
329 )
486
330
487 op.create_table(
331 op.create_table(
488 "plugin_configs",
332 'plugin_configs',
489 sa.Column("id", sa.Integer, primary_key=True),
333 sa.Column('id', sa.Integer, primary_key=True),
490 sa.Column("plugin_name", sa.Unicode(128)),
334 sa.Column('plugin_name', sa.Unicode(128)),
491 sa.Column("section", sa.Unicode(128)),
335 sa.Column('section', sa.Unicode(128)),
492 sa.Column("config", sa.dialects.postgresql.JSON, server_default="{}"),
336 sa.Column('config', sa.dialects.postgresql.JSON,
493 sa.Column(
337 server_default="{}"),
494 "resource_id",
338 sa.Column('resource_id', sa.Integer(),
495 sa.Integer(),
339 sa.ForeignKey('resources.resource_id', onupdate='cascade',
496 sa.ForeignKey(
340 ondelete='cascade')),
497 "resources.resource_id", onupdate="cascade", ondelete="cascade"
341 sa.Column('owner_id', sa.Integer(),
498 ),
342 sa.ForeignKey('users.id', onupdate='cascade',
499 ),
343 ondelete='cascade')))
500 sa.Column(
501 "owner_id",
502 sa.Integer(),
503 sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade"),
504 ),
505 )
506
344
507 op.create_table(
345 op.create_table(
508 "rc_versions",
346 'rc_versions',
509 sa.Column("name", sa.Unicode(40), primary_key=True),
347 sa.Column('name', sa.Unicode(40), primary_key=True),
510 sa.Column("value", sa.Unicode(40)),
348 sa.Column('value', sa.Unicode(40)),
511 )
512 version_table = sa.table(
513 "rc_versions",
514 sa.Column("name", sa.Unicode(40)),
515 sa.Column("value", sa.Unicode(40)),
516 )
349 )
350 version_table = sa.table('rc_versions',
351 sa.Column('name', sa.Unicode(40)),
352 sa.Column('value', sa.Unicode(40)))
517
353
518 insert = version_table.insert().values(name="es_reports")
354 insert = version_table.insert().values(name='es_reports')
519 op.execute(insert)
355 op.execute(insert)
520 insert = version_table.insert().values(name="es_reports_groups")
356 insert = version_table.insert().values(name='es_reports_groups')
521 op.execute(insert)
357 op.execute(insert)
522 insert = version_table.insert().values(name="es_reports_stats")
358 insert = version_table.insert().values(name='es_reports_stats')
523 op.execute(insert)
359 op.execute(insert)
524 insert = version_table.insert().values(name="es_logs")
360 insert = version_table.insert().values(name='es_logs')
525 op.execute(insert)
361 op.execute(insert)
526 insert = version_table.insert().values(name="es_metrics")
362 insert = version_table.insert().values(name='es_metrics')
527 op.execute(insert)
363 op.execute(insert)
528 insert = version_table.insert().values(name="es_slow_calls")
364 insert = version_table.insert().values(name='es_slow_calls')
529 op.execute(insert)
365 op.execute(insert)
530
366
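Note: the six inserts above seed rc_versions with one marker row per Elasticsearch index family, so later reindexing code can record a value against each name. Written as a loop they would read:

    for name in ("es_reports", "es_reports_groups", "es_reports_stats",
                 "es_logs", "es_metrics", "es_slow_calls"):
        op.execute(version_table.insert().values(name=name))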
531 op.execute(
367
532 """
368 op.execute('''
533 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
369 CREATE OR REPLACE FUNCTION floor_time_5min(timestamp without time zone)
534 RETURNS timestamp without time zone AS
370 RETURNS timestamp without time zone AS
535 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
371 $BODY$SELECT date_trunc('hour', $1) + INTERVAL '5 min' * FLOOR(date_part('minute', $1) / 5.0)$BODY$
536 LANGUAGE sql VOLATILE;
372 LANGUAGE sql VOLATILE;
537 """
373 ''')
538 )
539
374
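Note: floor_time_5min rounds a timestamp down to its 5-minute bucket: date_trunc to the hour, plus five minutes for every whole 5-minute block of the minute part. A Python equivalent for reference:

    from datetime import datetime

    def floor_time_5min(ts):
        # mirrors date_trunc('hour', ts) + 5 min * floor(minute / 5)
        return ts.replace(minute=ts.minute - ts.minute % 5,
                          second=0, microsecond=0)

    floor_time_5min(datetime(2014, 10, 13, 23, 47, 38))  # -> 23:45:00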
540 op.execute(
375 op.execute('''
541 """
542 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
376 CREATE OR REPLACE FUNCTION partition_logs() RETURNS trigger
543 LANGUAGE plpgsql SECURITY DEFINER
377 LANGUAGE plpgsql SECURITY DEFINER
544 AS $$
378 AS $$
@@ -592,17 +426,13 @@ def upgrade():
592 RETURN NULL;
426 RETURN NULL;
593 END
427 END
594 $$;
428 $$;
595 """
429 ''')
596 )
597
430
598 op.execute(
431 op.execute('''
599 """
600 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
432 CREATE TRIGGER partition_logs BEFORE INSERT ON logs FOR EACH ROW EXECUTE PROCEDURE partition_logs();
601 """
433 ''')
602 )
603
434
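Note: partition_logs is the first of six BEFORE INSERT trigger functions wired up below (metrics, reports_stats, reports_groups, reports and slow_calls follow the same shape); each derives a partition table from the row's timestamp, creates it on first use as a child of the parent table, inserts the row there, and returns NULL so the parent stays empty. The function bodies are elided in this diff; the naming step would look roughly like this (scheme illustrative, not the exact trigger logic):

    from datetime import datetime

    def partition_name(base, ts):
        # hypothetical mirror of the trigger's partition naming
        return "{}_p_{}".format(base, ts.strftime("%Y_%m"))

    partition_name("logs", datetime(2018, 10, 13))  # -> 'logs_p_2018_10'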
604 op.execute(
435 op.execute('''
605 """
606 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
436 CREATE OR REPLACE FUNCTION partition_metrics() RETURNS trigger
607 LANGUAGE plpgsql SECURITY DEFINER
437 LANGUAGE plpgsql SECURITY DEFINER
608 AS $$
438 AS $$
@@ -633,17 +463,13 @@ def upgrade():
633 RETURN NULL;
463 RETURN NULL;
634 END
464 END
635 $$;
465 $$;
636 """
466 ''')
637 )
638
467
639 op.execute(
468 op.execute('''
640 """
641 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
469 CREATE TRIGGER partition_metrics BEFORE INSERT ON metrics FOR EACH ROW EXECUTE PROCEDURE partition_metrics();
642 """
470 ''')
643 )
644
471
645 op.execute(
472 op.execute('''
646 """
647 CREATE FUNCTION partition_reports_stats() RETURNS trigger
473 CREATE FUNCTION partition_reports_stats() RETURNS trigger
648 LANGUAGE plpgsql SECURITY DEFINER
474 LANGUAGE plpgsql SECURITY DEFINER
649 AS $$
475 AS $$
@@ -673,17 +499,13 @@ def upgrade():
673 RETURN NULL;
499 RETURN NULL;
674 END
500 END
675 $$;
501 $$;
676 """
502 ''')
677 )
678
503
679 op.execute(
504 op.execute('''
680 """
681 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
505 CREATE TRIGGER partition_reports_stats BEFORE INSERT ON reports_stats FOR EACH ROW EXECUTE PROCEDURE partition_reports_stats();
682 """
506 ''')
683 )
684
507
685 op.execute(
508 op.execute('''
686 """
687 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
509 CREATE OR REPLACE FUNCTION partition_reports_groups() RETURNS trigger
688 LANGUAGE plpgsql SECURITY DEFINER
510 LANGUAGE plpgsql SECURITY DEFINER
689 AS $$
511 AS $$
@@ -711,17 +533,13 @@ def upgrade():
711 RETURN NULL;
533 RETURN NULL;
712 END
534 END
713 $$;
535 $$;
714 """
536 ''')
715 )
716
537
717 op.execute(
538 op.execute('''
718 """
719 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
539 CREATE TRIGGER partition_reports_groups BEFORE INSERT ON reports_groups FOR EACH ROW EXECUTE PROCEDURE partition_reports_groups();
720 """
540 ''')
721 )
722
541
723 op.execute(
542 op.execute('''
724 """
725 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
543 CREATE OR REPLACE FUNCTION partition_reports() RETURNS trigger
726 LANGUAGE plpgsql SECURITY DEFINER
544 LANGUAGE plpgsql SECURITY DEFINER
727 AS $$
545 AS $$
@@ -755,17 +573,14 @@ def upgrade():
755 RETURN NULL;
573 RETURN NULL;
756 END
574 END
757 $$;
575 $$;
758 """
576 ''')
759 )
760
577
761 op.execute(
578 op.execute('''
762 """
763 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
579 CREATE TRIGGER partition_reports BEFORE INSERT ON reports FOR EACH ROW EXECUTE PROCEDURE partition_reports();
764 """
580 ''')
765 )
581
766
582
767 op.execute(
583 op.execute('''
768 """
769 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
584 CREATE OR REPLACE FUNCTION partition_slow_calls() RETURNS trigger
770 LANGUAGE plpgsql SECURITY DEFINER
585 LANGUAGE plpgsql SECURITY DEFINER
771 AS $$
586 AS $$
@@ -799,15 +614,11 @@ def upgrade():
799 RETURN NULL;
614 RETURN NULL;
800 END
615 END
801 $$;
616 $$;
802 """
617 ''')
803 )
804
618
805 op.execute(
619 op.execute('''
806 """
807 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
620 CREATE TRIGGER partition_slow_calls BEFORE INSERT ON slow_calls FOR EACH ROW EXECUTE PROCEDURE partition_slow_calls();
808 """
621 ''')
809 )
810
811
622
812 def downgrade():
623 def downgrade():
813 pass
624 pass
@@ -7,8 +7,8 @@ Create Date: 2018-02-28 13:52:50.717217
7 """
7 """
8
8
9 # revision identifiers, used by Alembic.
9 # revision identifiers, used by Alembic.
10 revision = "e9fcfbdd9498"
10 revision = 'e9fcfbdd9498'
11 down_revision = "55b6e612672f"
11 down_revision = '55b6e612672f'
12
12
13 from alembic import op
13 from alembic import op
14 import sqlalchemy as sa
14 import sqlalchemy as sa
@@ -16,25 +16,17 @@ import sqlalchemy as sa
16
16
17 def upgrade():
17 def upgrade():
18 op.create_table(
18 op.create_table(
19 "channels_resources",
19 'channels_resources',
20 sa.Column(
20 sa.Column('channel_pkey', sa.Integer,
21 "channel_pkey",
21 sa.ForeignKey('alert_channels.pkey',
22 sa.Integer,
22 ondelete='CASCADE', onupdate='CASCADE'),
23 sa.ForeignKey(
23 primary_key=True),
24 "alert_channels.pkey", ondelete="CASCADE", onupdate="CASCADE"
24 sa.Column('resource_id', sa.Integer,
25 ),
25 sa.ForeignKey('resources.resource_id',
26 primary_key=True,
26 ondelete='CASCADE', onupdate='CASCADE'),
27 ),
27 primary_key=True)
28 sa.Column(
29 "resource_id",
30 sa.Integer,
31 sa.ForeignKey(
32 "resources.resource_id", ondelete="CASCADE", onupdate="CASCADE"
33 ),
34 primary_key=True,
35 ),
36 )
28 )
37
29
38
30
39 def downgrade():
31 def downgrade():
40 op.drop_table("channels_resources")
32 op.drop_table('channels_resources')
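Note: both foreign keys are primary-key columns, giving channels_resources a composite primary key, so an alert channel can be bound to a given resource at most once, and the CASCADE rules clean rows up when either side disappears. Sketch (assuming a Table object like the channel_resources_m2m_table defined later in this changeset):

    ins = channels_resources.insert().values(channel_pkey=1, resource_id=10)
    op.execute(ins)  # ok (values hypothetical)
    op.execute(ins)  # would fail: duplicate composite primary key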
@@ -29,11 +29,11 @@ log = logging.getLogger(__name__)
29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
29 DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
30
30
31 NAMING_CONVENTION = {
31 NAMING_CONVENTION = {
32 "ix": "ix_%(column_0_label)s",
32 "ix": 'ix_%(column_0_label)s',
33 "uq": "uq_%(table_name)s_%(column_0_name)s",
33 "uq": "uq_%(table_name)s_%(column_0_name)s",
34 "ck": "ck_%(table_name)s_%(constraint_name)s",
34 "ck": "ck_%(table_name)s_%(constraint_name)s",
35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
35 "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
36 "pk": "pk_%(table_name)s",
36 "pk": "pk_%(table_name)s"
37 }
37 }
38
38
39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
39 metadata = MetaData(naming_convention=NAMING_CONVENTION)
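Note: the naming-convention dict makes SQLAlchemy emit deterministic constraint and index names, which keeps hand-written and autogenerated Alembic migrations in sync. Illustration with a hypothetical table:

    import sqlalchemy as sa

    t = sa.Table("demo", metadata, sa.Column("email", sa.Unicode(255)))
    sa.UniqueConstraint(t.c.email)  # named uq_demo_email by the "uq" template
    sa.Index(None, t.c.email)       # named ix_demo_email by the "ix" template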
@@ -59,24 +59,23 @@ class SliceableESQuery(object):
59 self.query = query
59 self.query = query
60 self.sort_query = sort_query
60 self.sort_query = sort_query
61 self.aggregations = aggregations
61 self.aggregations = aggregations
62 self.items_per_page = kwconfig.pop("items_per_page", 10)
62 self.items_per_page = kwconfig.pop('items_per_page', 10)
63 self.page = kwconfig.pop("page", 1)
63 self.page = kwconfig.pop('page', 1)
64 self.kwconfig = kwconfig
64 self.kwconfig = kwconfig
65 self.result = None
65 self.result = None
66
66
67 def __getitem__(self, index):
67 def __getitem__(self, index):
68 config = self.kwconfig.copy()
68 config = self.kwconfig.copy()
69 config["from_"] = index.start
69 config['es_from'] = index.start
70 query = self.query.copy()
70 query = self.query.copy()
71 if self.sort_query:
71 if self.sort_query:
72 query.update(self.sort_query)
72 query.update(self.sort_query)
73 self.result = Datastores.es.search(
73 self.result = Datastores.es.search(query, size=self.items_per_page,
74 body=query, size=self.items_per_page, **config
74 **config)
75 )
76 if self.aggregations:
75 if self.aggregations:
77 self.items = self.result.get("aggregations")
76 self.items = self.result.get('aggregations')
78 else:
77 else:
79 self.items = self.result["hits"]["hits"]
78 self.items = self.result['hits']['hits']
80
79
81 return self.items
80 return self.items
82
81
@@ -86,15 +85,14 @@ class SliceableESQuery(object):
86 def __len__(self):
85 def __len__(self):
87 config = self.kwconfig.copy()
86 config = self.kwconfig.copy()
88 query = self.query.copy()
87 query = self.query.copy()
89 self.result = Datastores.es.search(
88 self.result = Datastores.es.search(query, size=self.items_per_page,
90 body=query, size=self.items_per_page, **config
89 **config)
91 )
92 if self.aggregations:
90 if self.aggregations:
93 self.items = self.result.get("aggregations")
91 self.items = self.result.get('aggregations')
94 else:
92 else:
95 self.items = self.result["hits"]["hits"]
93 self.items = self.result['hits']['hits']
96
94
97 count = int(self.result["hits"]["total"])
95 count = int(self.result['hits']['total'])
98 return count if count < 5000 else 5000
96 return count if count < 5000 else 5000
99
97
100
98
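Note: SliceableESQuery adapts an Elasticsearch query to the sequence protocol that paginators expect: slicing executes the search with the slice start as the offset, and len() reports the hit total capped at 5000 to avoid deep paging. Usage sketch (constructor signature assumed from the attributes set above):

    query = {"query": {"match_all": {}}}
    sliceable = SliceableESQuery(query, items_per_page=10, page=1)
    first_page = sliceable[0:10]  # runs the search with offset 0
    total = len(sliceable)        # min(total hits, 5000)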
@@ -104,7 +102,8 @@ from appenlight.models.user import User
104 from appenlight.models.alert_channel import AlertChannel
102 from appenlight.models.alert_channel import AlertChannel
105 from appenlight.models.alert_channel_action import AlertChannelAction
103 from appenlight.models.alert_channel_action import AlertChannelAction
106 from appenlight.models.metric import Metric
104 from appenlight.models.metric import Metric
107 from appenlight.models.application_postprocess_conf import ApplicationPostprocessConf
105 from appenlight.models.application_postprocess_conf import \
106 ApplicationPostprocessConf
108 from appenlight.models.auth_token import AuthToken
107 from appenlight.models.auth_token import AuthToken
109 from appenlight.models.event import Event
108 from appenlight.models.event import Event
110 from appenlight.models.external_identity import ExternalIdentity
109 from appenlight.models.external_identity import ExternalIdentity
@@ -125,15 +124,7 @@ from appenlight.models.user_permission import UserPermission
125 from appenlight.models.user_resource_permission import UserResourcePermission
124 from appenlight.models.user_resource_permission import UserResourcePermission
126 from ziggurat_foundations import ziggurat_model_init
125 from ziggurat_foundations import ziggurat_model_init
127
126
128 ziggurat_model_init(
127 ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission,
129 User,
128 UserResourcePermission, GroupResourcePermission,
130 Group,
129 Resource,
131 UserGroup,
130 ExternalIdentity, passwordmanager=None)
132 GroupPermission,
133 UserPermission,
134 UserResourcePermission,
135 GroupResourcePermission,
136 Resource,
137 ExternalIdentity,
138 passwordmanager=None,
139 )
@@ -27,125 +27,126 @@ log = logging.getLogger(__name__)
27
27
28 #
28 #
29 channel_rules_m2m_table = sa.Table(
29 channel_rules_m2m_table = sa.Table(
30 "channels_actions",
30 'channels_actions', Base.metadata,
31 Base.metadata,
31 sa.Column('channel_pkey', sa.Integer,
32 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
32 sa.ForeignKey('alert_channels.pkey')),
33 sa.Column("action_pkey", sa.Integer, sa.ForeignKey("alert_channels_actions.pkey")),
33 sa.Column('action_pkey', sa.Integer,
34 sa.ForeignKey('alert_channels_actions.pkey'))
34 )
35 )
35
36
36 channel_resources_m2m_table = sa.Table(
37 channel_resources_m2m_table = sa.Table(
37 "channels_resources",
38 'channels_resources', Base.metadata,
38 Base.metadata,
39 sa.Column('channel_pkey', sa.Integer,
39 sa.Column("channel_pkey", sa.Integer, sa.ForeignKey("alert_channels.pkey")),
40 sa.ForeignKey('alert_channels.pkey')),
40 sa.Column("resource_id", sa.Integer, sa.ForeignKey("resources.resource_id")),
41 sa.Column('resource_id', sa.Integer,
42 sa.ForeignKey('resources.resource_id'))
41 )
43 )
42
44
43 DATE_FRMT = "%Y-%m-%dT%H:%M"
45 DATE_FRMT = '%Y-%m-%dT%H:%M'
44
46
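Note: DATE_FRMT is the minute-resolution timestamp format used when the notification code below builds report-list URLs:

    from datetime import datetime

    datetime(2018, 10, 13, 19, 27).strftime(DATE_FRMT)  # '2018-10-13T19:27'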
45
47
46 class AlertChannel(Base, BaseModel):
48 class AlertChannel(Base, BaseModel):
47 """
49 """
48 Stores information about possible alerting options
50 Stores information about possible alerting options
49 """
51 """
50
52 __tablename__ = 'alert_channels'
51 __tablename__ = "alert_channels"
53 __possible_channel_names__ = ['email']
52 __possible_channel_names__ = ["email"]
53 __mapper_args__ = {
54 __mapper_args__ = {
54 "polymorphic_on": "channel_name",
55 'polymorphic_on': 'channel_name',
55 "polymorphic_identity": "integration",
56 'polymorphic_identity': 'integration'
56 }
57 }
57
58
58 owner_id = sa.Column(
59 owner_id = sa.Column(sa.Unicode(30),
59 sa.Unicode(30),
60 sa.ForeignKey('users.id', onupdate='CASCADE',
60 sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
61 ondelete='CASCADE'))
61 )
62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
62 channel_name = sa.Column(sa.Unicode(25), nullable=False)
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default="")
63 channel_value = sa.Column(sa.Unicode(80), nullable=False, default='')
64 channel_json_conf = sa.Column(JSON(), nullable=False, default="")
64 channel_json_conf = sa.Column(JSON(), nullable=False, default='')
65 channel_validated = sa.Column(sa.Boolean, nullable=False, default=False)
65 channel_validated = sa.Column(sa.Boolean, nullable=False,
66 send_alerts = sa.Column(sa.Boolean, nullable=False, default=True)
66 default=False)
67 daily_digest = sa.Column(sa.Boolean, nullable=False, default=True)
67 send_alerts = sa.Column(sa.Boolean, nullable=False,
68 integration_id = sa.Column(
68 default=True)
69 sa.Integer, sa.ForeignKey("integrations.id"), nullable=True
69 daily_digest = sa.Column(sa.Boolean, nullable=False,
70 )
70 default=True)
71 integration_id = sa.Column(sa.Integer, sa.ForeignKey('integrations.id'),
72 nullable=True)
71 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
73 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
72
74
73 channel_actions = sa.orm.relationship(
75 channel_actions = sa.orm.relationship('AlertChannelAction',
74 "AlertChannelAction",
76 cascade="all",
75 cascade="all",
77 passive_deletes=True,
76 passive_deletes=True,
78 passive_updates=True,
77 passive_updates=True,
79 secondary=channel_rules_m2m_table,
78 secondary=channel_rules_m2m_table,
80 backref='channels')
79 backref="channels",
81 resources = sa.orm.relationship('Resource',
80 )
82 cascade="all",
81 resources = sa.orm.relationship(
83 passive_deletes=True,
82 "Resource",
84 passive_updates=True,
83 cascade="all",
85 secondary=channel_resources_m2m_table,
84 passive_deletes=True,
86 backref='resources')
85 passive_updates=True,
86 secondary=channel_resources_m2m_table,
87 backref="resources",
88 )
89
87
90 @property
88 @property
91 def channel_visible_value(self):
89 def channel_visible_value(self):
92 if self.integration:
90 if self.integration:
93 return "{}: {}".format(
91 return '{}: {}'.format(
94 self.channel_name, self.integration.resource.resource_name
92 self.channel_name,
93 self.integration.resource.resource_name
95 )
94 )
96
95
97 return "{}: {}".format(self.channel_name, self.channel_value)
96 return '{}: {}'.format(
97 self.channel_name,
98 self.channel_value
99 )
98
100
99 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=True):
101 def get_dict(self, exclude_keys=None, include_keys=None,
102 extended_info=True):
100 """
103 """
101 Returns dictionary with required information that will be consumed by
104 Returns dictionary with required information that will be consumed by
102 angular
105 angular
103 """
106 """
104 instance_dict = super(AlertChannel, self).get_dict(exclude_keys, include_keys)
107 instance_dict = super(AlertChannel, self).get_dict(exclude_keys,
108 include_keys)
105 exclude_keys_list = exclude_keys or []
109 exclude_keys_list = exclude_keys or []
106 include_keys_list = include_keys or []
110 include_keys_list = include_keys or []
107
111
108 instance_dict["supports_report_alerting"] = True
112 instance_dict['supports_report_alerting'] = True
109 instance_dict["channel_visible_value"] = self.channel_visible_value
113 instance_dict['channel_visible_value'] = self.channel_visible_value
110
114
111 if extended_info:
115 if extended_info:
112 instance_dict["actions"] = [
116 instance_dict['actions'] = [
113 rule.get_dict(extended_info=True) for rule in self.channel_actions
117 rule.get_dict(extended_info=True) for
114 ]
118 rule in self.channel_actions]
115
119
116 del instance_dict["channel_json_conf"]
120 del instance_dict['channel_json_conf']
117
121
118 if self.integration:
122 if self.integration:
119 instance_dict[
123 instance_dict[
120 "supports_report_alerting"
124 'supports_report_alerting'] = \
121 ] = self.integration.supports_report_alerting
125 self.integration.supports_report_alerting
122 d = {}
126 d = {}
123 for k in instance_dict.keys():
127 for k in instance_dict.keys():
124 if k not in exclude_keys_list and (
128 if (k not in exclude_keys_list and
125 k in include_keys_list or not include_keys
129 (k in include_keys_list or not include_keys)):
126 ):
127 d[k] = instance_dict[k]
130 d[k] = instance_dict[k]
128 return d
131 return d
129
132
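Note: the closing loop of get_dict applies both filters: keys in exclude_keys are always dropped, and when include_keys is given only the listed keys survive (channel_json_conf is removed unconditionally before the loop). Illustration (channel instance hypothetical):

    channel.get_dict(exclude_keys=["actions"])               # everything except actions
    channel.get_dict(include_keys=["pkey", "channel_name"])  # only these two keys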
130 def __repr__(self):
133 def __repr__(self):
131 return "<AlertChannel: (%s,%s), user:%s>" % (
134 return '<AlertChannel: (%s,%s), user:%s>' % (self.channel_name,
132 self.channel_name,
135 self.channel_value,
133 self.channel_value,
136 self.user_name,)
134 self.user_name,
135 )
136
137
137 def send_digest(self, **kwargs):
138 def send_digest(self, **kwargs):
138 """
139 """
139 This should implement daily top error report notifications
140 This should implement daily top error report notifications
140 """
141 """
141 log.warning("send_digest NOT IMPLEMENTED")
142 log.warning('send_digest NOT IMPLEMENTED')
142
143
143 def notify_reports(self, **kwargs):
144 def notify_reports(self, **kwargs):
144 """
145 """
145 This should implement notification of reports that occured in 1 min
146 This should implement notification of reports that occured in 1 min
146 interval
147 interval
147 """
148 """
148 log.warning("notify_reports NOT IMPLEMENTED")
149 log.warning('notify_reports NOT IMPLEMENTED')
149
150
150 def notify_alert(self, **kwargs):
151 def notify_alert(self, **kwargs):
151 """
152 """
@@ -159,85 +160,87 @@ class AlertChannel(Base, BaseModel):
159 request: request object
160 request: request object
160
161
161 """
162 """
162 alert_name = kwargs["event"].unified_alert_name()
163 alert_name = kwargs['event'].unified_alert_name()
163 if alert_name in ["slow_report_alert", "error_report_alert"]:
164 if alert_name in ['slow_report_alert', 'error_report_alert']:
164 self.notify_report_alert(**kwargs)
165 self.notify_report_alert(**kwargs)
165 elif alert_name == "uptime_alert":
166 elif alert_name == 'uptime_alert':
166 self.notify_uptime_alert(**kwargs)
167 self.notify_uptime_alert(**kwargs)
167 elif alert_name == "chart_alert":
168 elif alert_name == 'chart_alert':
168 self.notify_chart_alert(**kwargs)
169 self.notify_chart_alert(**kwargs)
169
170
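Note: notify_alert is a thin dispatcher keyed on the event's unified name; subclasses only override the leaf notify_* hooks, and the base implementations below just log a warning. Routing sketch (event/user/request objects hypothetical):

    channel.notify_alert(event=event, user=user, request=request)
    # slow_report_alert, error_report_alert -> notify_report_alert(...)
    # uptime_alert                          -> notify_uptime_alert(...)
    # chart_alert                           -> notify_chart_alert(...)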
170 def notify_chart_alert(self, **kwargs):
171 def notify_chart_alert(self, **kwargs):
171 """
172 """
172 This should implement report open/close alerts notifications
173 This should implement report open/close alerts notifications
173 """
174 """
174 log.warning("notify_chart_alert NOT IMPLEMENTED")
175 log.warning('notify_chart_alert NOT IMPLEMENTED')
175
176
176 def notify_report_alert(self, **kwargs):
177 def notify_report_alert(self, **kwargs):
177 """
178 """
178 This should implement report open/close alerts notifications
179 This should implement report open/close alerts notifications
179 """
180 """
180 log.warning("notify_report_alert NOT IMPLEMENTED")
181 log.warning('notify_report_alert NOT IMPLEMENTED')
181
182
182 def notify_uptime_alert(self, **kwargs):
183 def notify_uptime_alert(self, **kwargs):
183 """
184 """
184 This should implement uptime open/close alerts notifications
185 This should implement uptime open/close alerts notifications
185 """
186 """
186 log.warning("notify_uptime_alert NOT IMPLEMENTED")
187 log.warning('notify_uptime_alert NOT IMPLEMENTED')
187
188
188 def get_notification_basic_vars(self, kwargs):
189 def get_notification_basic_vars(self, kwargs):
189 """
190 """
190 Sets the most common variables used later for rendering notifications
191 Sets the most common variables used later for rendering notifications
191 for a channel
192 for a channel
192 """
193 """
193 if "event" in kwargs:
194 if 'event' in kwargs:
194 kwargs["since_when"] = kwargs["event"].start_date
195 kwargs['since_when'] = kwargs['event'].start_date
195
196
196 url_start_date = kwargs.get("since_when") - timedelta(minutes=1)
197 url_start_date = kwargs.get('since_when') - timedelta(minutes=1)
197 url_end_date = kwargs.get("since_when") + timedelta(minutes=4)
198 url_end_date = kwargs.get('since_when') + timedelta(minutes=4)
198 tmpl_vars = {
199 tmpl_vars = {
199 "timestamp": kwargs["since_when"],
200 "timestamp": kwargs['since_when'],
200 "user": kwargs["user"],
201 "user": kwargs['user'],
201 "since_when": kwargs.get("since_when"),
202 "since_when": kwargs.get('since_when'),
202 "url_start_date": url_start_date,
203 "url_start_date": url_start_date,
203 "url_end_date": url_end_date,
204 "url_end_date": url_end_date
204 }
205 }
205 tmpl_vars["resource_name"] = kwargs["resource"].resource_name
206 tmpl_vars["resource_name"] = kwargs['resource'].resource_name
206 tmpl_vars["resource"] = kwargs["resource"]
207 tmpl_vars["resource"] = kwargs['resource']
207
208
208 if "event" in kwargs:
209 if 'event' in kwargs:
209 tmpl_vars["event_values"] = kwargs["event"].values
210 tmpl_vars['event_values'] = kwargs['event'].values
210 tmpl_vars["alert_type"] = kwargs["event"].unified_alert_name()
211 tmpl_vars['alert_type'] = kwargs['event'].unified_alert_name()
211 tmpl_vars["alert_action"] = kwargs["event"].unified_alert_action()
212 tmpl_vars['alert_action'] = kwargs['event'].unified_alert_action()
212 return tmpl_vars
213 return tmpl_vars
213
214
214 def report_alert_notification_vars(self, kwargs):
215 def report_alert_notification_vars(self, kwargs):
215 tmpl_vars = self.get_notification_basic_vars(kwargs)
216 tmpl_vars = self.get_notification_basic_vars(kwargs)
216 reports = kwargs.get("reports", [])
217 reports = kwargs.get('reports', [])
217 tmpl_vars["reports"] = reports
218 tmpl_vars["reports"] = reports
218 tmpl_vars["confirmed_total"] = len(reports)
219 tmpl_vars["confirmed_total"] = len(reports)
219
220
220 tmpl_vars["report_type"] = "error reports"
221 tmpl_vars["report_type"] = "error reports"
221 tmpl_vars["url_report_type"] = "report/list"
222 tmpl_vars["url_report_type"] = 'report/list'
222
223
223 alert_type = tmpl_vars.get("alert_type", "")
224 alert_type = tmpl_vars.get('alert_type', '')
224 if "slow_report" in alert_type:
225 if 'slow_report' in alert_type:
225 tmpl_vars["report_type"] = "slow reports"
226 tmpl_vars["report_type"] = "slow reports"
226 tmpl_vars["url_report_type"] = "report/list_slow"
227 tmpl_vars["url_report_type"] = 'report/list_slow'
227
228
228 app_url = kwargs["request"].registry.settings["_mail_url"]
229 app_url = kwargs['request'].registry.settings['_mail_url']
229
230
230 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
231 destination_url = kwargs['request'].route_url('/',
232 _app_url=app_url)
231 if alert_type:
233 if alert_type:
232 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(
234 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
233 tmpl_vars["url_report_type"],
235 tmpl_vars["url_report_type"],
234 tmpl_vars["resource"].resource_id,
236 tmpl_vars['resource'].resource_id,
235 tmpl_vars["url_start_date"].strftime(DATE_FRMT),
237 tmpl_vars['url_start_date'].strftime(DATE_FRMT),
236 tmpl_vars["url_end_date"].strftime(DATE_FRMT),
238 tmpl_vars['url_end_date'].strftime(DATE_FRMT)
237 )
239 )
238 else:
240 else:
239 destination_url += "ui/{}?resource={}".format(
241 destination_url += 'ui/{}?resource={}'.format(
240 tmpl_vars["url_report_type"], tmpl_vars["resource"].resource_id
242 tmpl_vars["url_report_type"],
243 tmpl_vars['resource'].resource_id
241 )
244 )
242 tmpl_vars["destination_url"] = destination_url
245 tmpl_vars["destination_url"] = destination_url
243
246
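
Review note: notify_alert at the top of this hunk routes an event by its unified_alert_name(), with slow and error report alerts sharing one handler. The same dispatch can be read as a lookup table; this is an illustrative rewrite of the method body only, the PR keeps the if/elif chain:

    handlers = {
        'slow_report_alert': self.notify_report_alert,
        'error_report_alert': self.notify_report_alert,
        'uptime_alert': self.notify_uptime_alert,
        'chart_alert': self.notify_chart_alert,
    }
    handler = handlers.get(kwargs['event'].unified_alert_name())
    if handler:
        handler(**kwargs)
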
@@ -245,54 +248,58 b' class AlertChannel(Base, BaseModel):'
245
248
246 def uptime_alert_notification_vars(self, kwargs):
249 def uptime_alert_notification_vars(self, kwargs):
247 tmpl_vars = self.get_notification_basic_vars(kwargs)
250 tmpl_vars = self.get_notification_basic_vars(kwargs)
248 app_url = kwargs["request"].registry.settings["_mail_url"]
251 app_url = kwargs['request'].registry.settings['_mail_url']
249 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
252 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
250 destination_url += "ui/{}?resource={}".format(
253 destination_url += 'ui/{}?resource={}'.format(
251 "uptime", tmpl_vars["resource"].resource_id
254 'uptime',
252 )
255 tmpl_vars['resource'].resource_id)
253 tmpl_vars["destination_url"] = destination_url
256 tmpl_vars['destination_url'] = destination_url
254
257
255 reason = ""
258 reason = ''
256 e_values = tmpl_vars.get("event_values")
259 e_values = tmpl_vars.get('event_values')
257
260
258 if e_values and e_values.get("response_time") == 0:
261 if e_values and e_values.get('response_time') == 0:
259 reason += " Response time was slower than 20 seconds."
262 reason += ' Response time was slower than 20 seconds.'
260 elif e_values:
263 elif e_values:
261 code = e_values.get("status_code")
264 code = e_values.get('status_code')
262 reason += " Response status code: %s." % code
265 reason += ' Response status code: %s.' % code
263
266
264 tmpl_vars["reason"] = reason
267 tmpl_vars['reason'] = reason
265 return tmpl_vars
268 return tmpl_vars
266
269
267 def chart_alert_notification_vars(self, kwargs):
270 def chart_alert_notification_vars(self, kwargs):
268 tmpl_vars = self.get_notification_basic_vars(kwargs)
271 tmpl_vars = self.get_notification_basic_vars(kwargs)
269 tmpl_vars["chart_name"] = tmpl_vars["event_values"]["chart_name"]
272 tmpl_vars['chart_name'] = tmpl_vars['event_values']['chart_name']
270 tmpl_vars["action_name"] = tmpl_vars["event_values"].get("action_name") or ""
273 tmpl_vars['action_name'] = tmpl_vars['event_values'].get(
271 matched_values = tmpl_vars["event_values"]["matched_step_values"]
274 'action_name') or ''
272 tmpl_vars["readable_values"] = []
275 matched_values = tmpl_vars['event_values']['matched_step_values']
273 for key, value in list(matched_values["values"].items()):
276 tmpl_vars['readable_values'] = []
274 matched_label = matched_values["labels"].get(key)
277 for key, value in list(matched_values['values'].items()):
278 matched_label = matched_values['labels'].get(key)
275 if matched_label:
279 if matched_label:
276 tmpl_vars["readable_values"].append(
280 tmpl_vars['readable_values'].append({
277 {"label": matched_label["human_label"], "value": value}
281 'label': matched_label['human_label'],
278 )
282 'value': value
279 tmpl_vars["readable_values"] = sorted(
283 })
280 tmpl_vars["readable_values"], key=lambda x: x["label"]
284 tmpl_vars['readable_values'] = sorted(tmpl_vars['readable_values'],
281 )
285 key=lambda x: x['label'])
282 start_date = convert_date(tmpl_vars["event_values"]["start_interval"])
286 start_date = convert_date(tmpl_vars['event_values']['start_interval'])
283 end_date = None
287 end_date = None
284 if tmpl_vars["event_values"].get("end_interval"):
288 if tmpl_vars['event_values'].get('end_interval'):
285 end_date = convert_date(tmpl_vars["event_values"]["end_interval"])
289 end_date = convert_date(tmpl_vars['event_values']['end_interval'])
286
290
287 app_url = kwargs["request"].registry.settings["_mail_url"]
291 app_url = kwargs['request'].registry.settings['_mail_url']
288 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
292 destination_url = kwargs['request'].route_url('/', _app_url=app_url)
289 to_encode = {
293 to_encode = {
290 "resource": tmpl_vars["event_values"]["resource"],
294 'resource': tmpl_vars['event_values']['resource'],
291 "start_date": start_date.strftime(DATE_FRMT),
295 'start_date': start_date.strftime(DATE_FRMT),
292 }
296 }
293 if end_date:
297 if end_date:
294 to_encode["end_date"] = end_date.strftime(DATE_FRMT)
298 to_encode['end_date'] = end_date.strftime(DATE_FRMT)
295
299
296 destination_url += "ui/{}?{}".format("logs", urllib.parse.urlencode(to_encode))
300 destination_url += 'ui/{}?{}'.format(
297 tmpl_vars["destination_url"] = destination_url
301 'logs',
302 urllib.parse.urlencode(to_encode)
303 )
304 tmpl_vars['destination_url'] = destination_url
298 return tmpl_vars
305 return tmpl_vars
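
Review note: the chart-alert hunk above ends by composing the logs URL with urllib.parse.urlencode, which also percent-escapes the strftime output. A self-contained sketch with placeholder values (host and dates are illustrative; DATE_FRMT comes from this module):

    from urllib.parse import urlencode

    to_encode = {'resource': 7, 'start_date': '2016-06-29T10:00:00'}
    to_encode['end_date'] = '2016-06-29T10:05:00'  # only when the event closed
    url = 'https://appenlight.example.com/' + 'ui/{}?{}'.format(
        'logs', urlencode(to_encode))
    # ui/logs?resource=7&start_date=2016-06-29T10%3A00%3A00&end_date=...
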
@@ -16,7 +16,7 b''
16
16
17 import sqlalchemy as sa
17 import sqlalchemy as sa
18
18
19 from ziggurat_foundations.models.services.resource import ResourceService
19 from appenlight.models.resource import Resource
20 from appenlight.models import Base, get_db_session
20 from appenlight.models import Base, get_db_session
21 from sqlalchemy.orm import validates
21 from sqlalchemy.orm import validates
22 from ziggurat_foundations.models.base import BaseModel
22 from ziggurat_foundations.models.base import BaseModel
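
Review note: this import swap is the heart of the hunks below; both sides resolve a resource id to the same row, the PR only moves the lookup between ziggurat_foundations' service layer and appenlight's own Resource model:

    # one side:  ResourceService.by_resource_id(resource_id, db_session=db_session)
    # the other: Resource.by_resource_id(resource_id, db_session=db_session)
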
@@ -27,42 +27,39 b' class AlertChannelAction(Base, BaseModel):'
27 Stores notification conditions for a user's alert channels
27 Stores notification conditions for a user's alert channels
28 This is later used for rule parsing like "alert if http_status == 500"
28 This is later used for rule parsing like "alert if http_status == 500"
29 """
29 """
30 __tablename__ = 'alert_channels_actions'
30
31
31 __tablename__ = "alert_channels_actions"
32 types = ['report', 'chart']
32
33
33 types = ["report", "chart"]
34 owner_id = sa.Column(sa.Integer,
34
35 sa.ForeignKey('users.id', onupdate='CASCADE',
35 owner_id = sa.Column(
36 ondelete='CASCADE'))
36 sa.Integer, sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE")
37 )
38 resource_id = sa.Column(sa.Integer())
37 resource_id = sa.Column(sa.Integer())
39 action = sa.Column(sa.Unicode(10), nullable=False, default="always")
38 action = sa.Column(sa.Unicode(10), nullable=False, default='always')
40 type = sa.Column(sa.Unicode(10), nullable=False)
39 type = sa.Column(sa.Unicode(10), nullable=False)
41 other_id = sa.Column(sa.Unicode(40))
40 other_id = sa.Column(sa.Unicode(40))
42 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
41 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
43 rule = sa.Column(
42 rule = sa.Column(sa.dialects.postgresql.JSON,
44 sa.dialects.postgresql.JSON,
43 nullable=False, default={'field': 'http_status',
45 nullable=False,
44 "op": "ge", "value": "500"})
46 default={"field": "http_status", "op": "ge", "value": "500"},
47 )
48 config = sa.Column(sa.dialects.postgresql.JSON)
45 config = sa.Column(sa.dialects.postgresql.JSON)
49 name = sa.Column(sa.Unicode(255))
46 name = sa.Column(sa.Unicode(255))
50
47
51 @validates("notify_type")
48 @validates('notify_type')
52 def validate_email(self, key, notify_type):
49 def validate_email(self, key, notify_type):
53 assert notify_type in ["always", "only_first"]
50 assert notify_type in ['always', 'only_first']
54 return notify_type
51 return notify_type
55
52
56 def resource_name(self, db_session=None):
53 def resource_name(self, db_session=None):
57 db_session = get_db_session(db_session)
54 db_session = get_db_session(db_session)
58 if self.resource_id:
55 if self.resource_id:
59 return ResourceService.by_resource_id(
56 return Resource.by_resource_id(self.resource_id,
60 self.resource_id, db_session=db_session
57 db_session=db_session).resource_name
61 ).resource_name
62 else:
58 else:
63 return "any resource"
59 return 'any resource'
64
60
65 def get_dict(self, exclude_keys=None, include_keys=None, extended_info=False):
61 def get_dict(self, exclude_keys=None, include_keys=None,
62 extended_info=False):
66 """
63 """
67 Returns a dictionary with required information that will be consumed by
64 Returns a dictionary with required information that will be consumed by
68 Angular
65 Angular
@@ -71,14 +68,12 b' class AlertChannelAction(Base, BaseModel):'
71 exclude_keys_list = exclude_keys or []
68 exclude_keys_list = exclude_keys or []
72 include_keys_list = include_keys or []
69 include_keys_list = include_keys or []
73 if extended_info:
70 if extended_info:
74 instance_dict["channels"] = [
71 instance_dict['channels'] = [
75 c.get_dict(extended_info=False) for c in self.channels
72 c.get_dict(extended_info=False) for c in self.channels]
76 ]
77
73
78 d = {}
74 d = {}
79 for k in instance_dict.keys():
75 for k in instance_dict.keys():
80 if k not in exclude_keys_list and (
76 if (k not in exclude_keys_list and
81 k in include_keys_list or not include_keys
77 (k in include_keys_list or not include_keys)):
82 ):
83 d[k] = instance_dict[k]
78 d[k] = instance_dict[k]
84 return d
79 return d
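
Review note: AlertChannelAction stores its matching rule as a JSON column with a dict default and whitelists values through a SQLAlchemy validator. One oddity worth flagging in the hunk above: the validator is named validate_email yet validates notify_type, which is not among the columns shown. A minimal standalone model using the same pattern; every name here is illustrative, not the PR's schema:

    import sqlalchemy as sa
    from sqlalchemy.orm import validates
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Action(Base):
        __tablename__ = 'actions'
        pkey = sa.Column(sa.Integer, primary_key=True)
        action = sa.Column(sa.Unicode(10), nullable=False, default='always')
        rule = sa.Column(sa.JSON, nullable=False,
                         default={'field': 'http_status', 'op': 'ge',
                                  'value': '500'})

        @validates('action')
        def validate_action(self, key, value):
            # reject anything outside the allowed vocabulary
            assert value in ('always', 'only_first')
            return value
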
@@ -13,3 +13,4 b''
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
@@ -23,13 +23,15 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class CampfireAlertChannel(AlertChannel):
25 class CampfireAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "campfire"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'campfire'
28 }
27
29
28 @property
30 @property
29 def client(self):
31 def client(self):
30 client = CampfireIntegration.create_client(
32 client = CampfireIntegration.create_client(
31 self.integration.config["api_token"], self.integration.config["account"]
33 self.integration.config['api_token'],
32 )
34 self.integration.config['account'])
33 return client
35 return client
34
36
35 def notify_reports(self, **kwargs):
37 def notify_reports(self, **kwargs):
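
Review note: every channel subclass in this PR differs from AlertChannel only by its polymorphic_identity, which is SQLAlchemy single-table inheritance: a discriminator column on the base mapper picks the Python class per row. A minimal sketch of the pattern; the column names are assumptions, not AlertChannel's real schema:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Channel(Base):
        __tablename__ = 'channels'
        pkey = sa.Column(sa.Integer, primary_key=True)
        channel_name = sa.Column(sa.Unicode(25))  # discriminator
        __mapper_args__ = {'polymorphic_on': channel_name,
                           'polymorphic_identity': 'channel'}

    class CampfireChannel(Channel):
        __mapper_args__ = {'polymorphic_identity': 'campfire'}

    # Querying Channel now yields CampfireChannel objects for rows whose
    # channel_name column stores 'campfire'.
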
@@ -46,40 +48,37 b' class CampfireAlertChannel(AlertChannel):'
46 """
48 """
47 template_vars = self.report_alert_notification_vars(kwargs)
49 template_vars = self.report_alert_notification_vars(kwargs)
48
50
49 app_url = kwargs["request"].registry.settings["_mail_url"]
51 app_url = kwargs['request'].registry.settings['_mail_url']
50 destination_url = kwargs["request"].route_url("/", app_url=app_url)
52 destination_url = kwargs['request'].route_url('/',
51 f_args = (
53 app_url=app_url)
52 "report",
54 f_args = ('report',
53 template_vars["resource"].resource_id,
55 template_vars['resource'].resource_id,
54 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
56 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
55 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
57 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
56 )
58 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
57 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
59 *f_args)
58
60
59 if template_vars["confirmed_total"] > 1:
61 if template_vars['confirmed_total'] > 1:
60 template_vars["title"] = "%s - %s reports" % (
62 template_vars["title"] = "%s - %s reports" % (
61 template_vars["resource_name"],
63 template_vars['resource_name'],
62 template_vars["confirmed_total"],
64 template_vars['confirmed_total'],
63 )
65 )
64 else:
66 else:
65 error_title = truncate(
67 error_title = truncate(template_vars['reports'][0][1].error or
66 template_vars["reports"][0][1].error or "slow report", 90
68 'slow report', 90)
67 )
68 template_vars["title"] = "%s - '%s' report" % (
69 template_vars["title"] = "%s - '%s' report" % (
69 template_vars["resource_name"],
70 template_vars['resource_name'],
70 error_title,
71 error_title)
71 )
72
72
73 template_vars["title"] += " " + destination_url
73 template_vars["title"] += ' ' + destination_url
74
74
75 log_msg = "NOTIFY : %s via %s :: %s reports" % (
75 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
76 kwargs["user"].user_name,
76 kwargs['user'].user_name,
77 self.channel_visible_value,
77 self.channel_visible_value,
78 template_vars["confirmed_total"],
78 template_vars['confirmed_total'])
79 )
80 log.warning(log_msg)
79 log.warning(log_msg)
81
80
82 for room in self.integration.config["rooms"].split(","):
81 for room in self.integration.config['rooms'].split(','):
83 self.client.speak_to_room(room.strip(), template_vars["title"])
82 self.client.speak_to_room(room.strip(), template_vars["title"])
84
83
85 def notify_report_alert(self, **kwargs):
84 def notify_report_alert(self, **kwargs):
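
Review note: delivery above fans out over a comma-separated room list from the integration config; .strip() tolerates spaces after the commas. A runnable sketch with a stand-in client (speak_to_room is the real method name used above, the rest is mine):

    class FakeCampfireClient:
        def speak_to_room(self, room, message):
            print('-> %s: %s' % (room, message))

    client = FakeCampfireClient()
    for room in 'ops room, alerts,dev'.split(','):
        client.speak_to_room(room.strip(), "myapp - 3 reports")
    # 'ops room', 'alerts' and 'dev' each get one message
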
@@ -95,23 +94,23 b' class CampfireAlertChannel(AlertChannel):'
95 """
94 """
96 template_vars = self.report_alert_notification_vars(kwargs)
95 template_vars = self.report_alert_notification_vars(kwargs)
97
96
98 if kwargs["event"].unified_alert_action() == "OPEN":
97 if kwargs['event'].unified_alert_action() == 'OPEN':
99 title = "ALERT %s: %s - %s %s %s" % (
98 title = 'ALERT %s: %s - %s %s %s' % (
100 template_vars["alert_action"],
99 template_vars['alert_action'],
101 template_vars["resource_name"],
100 template_vars['resource_name'],
102 kwargs["event"].values["reports"],
101 kwargs['event'].values['reports'],
103 template_vars["report_type"],
102 template_vars['report_type'],
104 template_vars["destination_url"],
103 template_vars['destination_url']
105 )
104 )
106
105
107 else:
106 else:
108 title = "ALERT %s: %s type: %s" % (
107 title = 'ALERT %s: %s type: %s' % (
109 template_vars["alert_action"],
108 template_vars['alert_action'],
110 template_vars["resource_name"],
109 template_vars['resource_name'],
111 template_vars["alert_type"].replace("_", " "),
110 template_vars['alert_type'].replace('_', ' '),
112 )
111 )
113 for room in self.integration.config["rooms"].split(","):
112 for room in self.integration.config['rooms'].split(','):
114 self.client.speak_to_room(room.strip(), title, sound="VUVUZELA")
113 self.client.speak_to_room(room.strip(), title, sound='VUVUZELA')
115
114
116 def notify_uptime_alert(self, **kwargs):
115 def notify_uptime_alert(self, **kwargs):
117 """
116 """
@@ -126,15 +125,15 b' class CampfireAlertChannel(AlertChannel):'
126 """
125 """
127 template_vars = self.uptime_alert_notification_vars(kwargs)
126 template_vars = self.uptime_alert_notification_vars(kwargs)
128
127
129 message = "ALERT %s: %s has uptime issues %s\n\n" % (
128 message = 'ALERT %s: %s has uptime issues %s\n\n' % (
130 template_vars["alert_action"],
129 template_vars['alert_action'],
131 template_vars["resource_name"],
130 template_vars['resource_name'],
132 template_vars["destination_url"],
131 template_vars['destination_url']
133 )
132 )
134 message += template_vars["reason"]
133 message += template_vars['reason']
135
134
136 for room in self.integration.config["rooms"].split(","):
135 for room in self.integration.config['rooms'].split(','):
137 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
136 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
138
137
139 def send_digest(self, **kwargs):
138 def send_digest(self, **kwargs):
140 """
139 """
@@ -149,17 +148,17 b' class CampfireAlertChannel(AlertChannel):'
149
148
150 """
149 """
151 template_vars = self.report_alert_notification_vars(kwargs)
150 template_vars = self.report_alert_notification_vars(kwargs)
152 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
151 f_args = (template_vars['resource_name'],
152 template_vars['confirmed_total'],)
153 message = "Daily report digest: %s - %s reports" % f_args
153 message = "Daily report digest: %s - %s reports" % f_args
154 message += "{}\n".format(template_vars["destination_url"])
154 message += '{}\n'.format(template_vars['destination_url'])
155 for room in self.integration.config["rooms"].split(","):
155 for room in self.integration.config['rooms'].split(','):
156 self.client.speak_to_room(room.strip(), message)
156 self.client.speak_to_room(room.strip(), message)
157
157
158 log_msg = "DIGEST : %s via %s :: %s reports" % (
158 log_msg = 'DIGEST : %s via %s :: %s reports' % (
159 kwargs["user"].user_name,
159 kwargs['user'].user_name,
160 self.channel_visible_value,
160 self.channel_visible_value,
161 template_vars["confirmed_total"],
161 template_vars['confirmed_total'])
162 )
163 log.warning(log_msg)
162 log.warning(log_msg)
164
163
165 def notify_chart_alert(self, **kwargs):
164 def notify_chart_alert(self, **kwargs):
@@ -174,18 +173,16 b' class CampfireAlertChannel(AlertChannel):'
174
173
175 """
174 """
176 template_vars = self.chart_alert_notification_vars(kwargs)
175 template_vars = self.chart_alert_notification_vars(kwargs)
177 message = (
176 message = 'ALERT {}: value in "{}" chart: ' \
178 'ALERT {}: value in "{}" chart: '
177 'met alert "{}" criteria {} \n'.format(
179 'met alert "{}" criteria {} \n'.format(
178 template_vars['alert_action'],
180 template_vars["alert_action"],
179 template_vars['chart_name'],
181 template_vars["chart_name"],
180 template_vars['action_name'],
182 template_vars["action_name"],
181 template_vars['destination_url']
183 template_vars["destination_url"],
184 )
185 )
182 )
186
183
187 for item in template_vars["readable_values"]:
184 for item in template_vars['readable_values']:
188 message += "{}: {}\n".format(item["label"], item["value"])
185 message += '{}: {}\n'.format(item['label'], item['value'])
189
186
190 for room in self.integration.config["rooms"].split(","):
187 for room in self.integration.config['rooms'].split(','):
191 self.client.speak_to_room(room.strip(), message, sound="VUVUZELA")
188 self.client.speak_to_room(room.strip(), message, sound='VUVUZELA')
@@ -27,7 +27,9 b' class EmailAlertChannel(AlertChannel):'
27 Default email alerting channel
27 Default email alerting channel
28 """
28 """
29
29
30 __mapper_args__ = {"polymorphic_identity": "email"}
30 __mapper_args__ = {
31 'polymorphic_identity': 'email'
32 }
31
33
32 def notify_reports(self, **kwargs):
34 def notify_reports(self, **kwargs):
33 """
35 """
@@ -43,30 +45,25 b' class EmailAlertChannel(AlertChannel):'
43 """
45 """
44 template_vars = self.report_alert_notification_vars(kwargs)
46 template_vars = self.report_alert_notification_vars(kwargs)
45
47
46 if template_vars["confirmed_total"] > 1:
48 if template_vars['confirmed_total'] > 1:
47 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
49 template_vars["title"] = "AppEnlight :: %s - %s reports" % (
48 template_vars["resource_name"],
50 template_vars['resource_name'],
49 template_vars["confirmed_total"],
51 template_vars['confirmed_total'],
50 )
52 )
51 else:
53 else:
52 error_title = truncate(
54 error_title = truncate(template_vars['reports'][0][1].error or
53 template_vars["reports"][0][1].error or "slow report", 20
55 'slow report', 20)
54 )
55 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
56 template_vars["title"] = "AppEnlight :: %s - '%s' report" % (
56 template_vars["resource_name"],
57 template_vars['resource_name'],
57 error_title,
58 error_title)
58 )
59 UserService.send_email(kwargs['request'],
59 UserService.send_email(
60 [self.channel_value],
60 kwargs["request"],
61 template_vars,
61 [self.channel_value],
62 '/email_templates/notify_reports.jinja2')
62 template_vars,
63 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
63 "/email_templates/notify_reports.jinja2",
64 kwargs['user'].user_name,
64 )
65 log_msg = "NOTIFY : %s via %s :: %s reports" % (
66 kwargs["user"].user_name,
67 self.channel_visible_value,
65 self.channel_visible_value,
68 template_vars["confirmed_total"],
66 template_vars['confirmed_total'])
69 )
70 log.warning(log_msg)
67 log.warning(log_msg)
71
68
72 def send_digest(self, **kwargs):
69 def send_digest(self, **kwargs):
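
Review note: both email paths above funnel into UserService.send_email with the same positional shape; only the digest opts into immediately=True, silent=True. A hypothetical stand-in to document that shape (the real defaults of the two flags are not visible in this diff):

    def send_email(request, recipients, template_vars, template_path,
                   immediately=False, silent=False):
        # render template_path with template_vars and mail it to recipients;
        # `immediately` bypasses any deferred queue, `silent` swallows
        # delivery errors -- the digest path above passes both as True.
        print('render %s for %s' % (template_path, recipients))
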
@@ -84,23 +81,20 b' class EmailAlertChannel(AlertChannel):'
84 template_vars = self.report_alert_notification_vars(kwargs)
81 template_vars = self.report_alert_notification_vars(kwargs)
85 title = "AppEnlight :: Daily report digest: %s - %s reports"
82 title = "AppEnlight :: Daily report digest: %s - %s reports"
86 template_vars["email_title"] = title % (
83 template_vars["email_title"] = title % (
87 template_vars["resource_name"],
84 template_vars['resource_name'],
88 template_vars["confirmed_total"],
85 template_vars['confirmed_total'],
89 )
86 )
90
87
91 UserService.send_email(
88 UserService.send_email(kwargs['request'],
92 kwargs["request"],
89 [self.channel_value],
93 [self.channel_value],
90 template_vars,
94 template_vars,
91 '/email_templates/notify_reports.jinja2',
95 "/email_templates/notify_reports.jinja2",
92 immediately=True,
96 immediately=True,
93 silent=True)
97 silent=True,
94 log_msg = 'DIGEST : %s via %s :: %s reports' % (
98 )
95 kwargs['user'].user_name,
99 log_msg = "DIGEST : %s via %s :: %s reports" % (
100 kwargs["user"].user_name,
101 self.channel_visible_value,
96 self.channel_visible_value,
102 template_vars["confirmed_total"],
97 template_vars['confirmed_total'])
103 )
104 log.warning(log_msg)
98 log.warning(log_msg)
105
99
106 def notify_report_alert(self, **kwargs):
100 def notify_report_alert(self, **kwargs):
@@ -116,26 +110,23 b' class EmailAlertChannel(AlertChannel):'
116 """
110 """
117 template_vars = self.report_alert_notification_vars(kwargs)
111 template_vars = self.report_alert_notification_vars(kwargs)
118
112
119 if kwargs["event"].unified_alert_action() == "OPEN":
113 if kwargs['event'].unified_alert_action() == 'OPEN':
120 title = "AppEnlight :: ALERT %s: %s - %s %s" % (
114 title = 'AppEnlight :: ALERT %s: %s - %s %s' % (
121 template_vars["alert_action"],
115 template_vars['alert_action'],
122 template_vars["resource_name"],
116 template_vars['resource_name'],
123 kwargs["event"].values["reports"],
117 kwargs['event'].values['reports'],
124 template_vars["report_type"],
118 template_vars['report_type'],
125 )
119 )
126 else:
120 else:
127 title = "AppEnlight :: ALERT %s: %s type: %s" % (
121 title = 'AppEnlight :: ALERT %s: %s type: %s' % (
128 template_vars["alert_action"],
122 template_vars['alert_action'],
129 template_vars["resource_name"],
123 template_vars['resource_name'],
130 template_vars["alert_type"].replace("_", " "),
124 template_vars['alert_type'].replace('_', ' '),
131 )
125 )
132 template_vars["email_title"] = title
126 template_vars['email_title'] = title
133 UserService.send_email(
127 UserService.send_email(kwargs['request'], [self.channel_value],
134 kwargs["request"],
128 template_vars,
135 [self.channel_value],
129 '/email_templates/alert_reports.jinja2')
136 template_vars,
137 "/email_templates/alert_reports.jinja2",
138 )
139
130
140 def notify_uptime_alert(self, **kwargs):
131 def notify_uptime_alert(self, **kwargs):
141 """
132 """
@@ -149,18 +140,15 b' class EmailAlertChannel(AlertChannel):'
149
140
150 """
141 """
151 template_vars = self.uptime_alert_notification_vars(kwargs)
142 template_vars = self.uptime_alert_notification_vars(kwargs)
152 title = "AppEnlight :: ALERT %s: %s has uptime issues" % (
143 title = 'AppEnlight :: ALERT %s: %s has uptime issues' % (
153 template_vars["alert_action"],
144 template_vars['alert_action'],
154 template_vars["resource_name"],
145 template_vars['resource_name'],
155 )
146 )
156 template_vars["email_title"] = title
147 template_vars['email_title'] = title
157
148
158 UserService.send_email(
149 UserService.send_email(kwargs['request'], [self.channel_value],
159 kwargs["request"],
150 template_vars,
160 [self.channel_value],
151 '/email_templates/alert_uptime.jinja2')
161 template_vars,
162 "/email_templates/alert_uptime.jinja2",
163 )
164
152
165 def notify_chart_alert(self, **kwargs):
153 def notify_chart_alert(self, **kwargs):
166 """
154 """
@@ -175,18 +163,13 b' class EmailAlertChannel(AlertChannel):'
175 """
163 """
176 template_vars = self.chart_alert_notification_vars(kwargs)
164 template_vars = self.chart_alert_notification_vars(kwargs)
177
165
178 title = (
166 title = 'AppEnlight :: ALERT {} value in "{}" chart' \
179 'AppEnlight :: ALERT {} value in "{}" chart'
167 ' met alert "{}" criteria'.format(
180 ' met alert "{}" criteria'.format(
168 template_vars['alert_action'],
181 template_vars["alert_action"],
169 template_vars['chart_name'],
182 template_vars["chart_name"],
170 template_vars['action_name'],
183 template_vars["action_name"],
184 )
185 )
186 template_vars["email_title"] = title
187 UserService.send_email(
188 kwargs["request"],
189 [self.channel_value],
190 template_vars,
191 "/email_templates/alert_chart.jinja2",
192 )
171 )
172 template_vars['email_title'] = title
173 UserService.send_email(kwargs['request'], [self.channel_value],
174 template_vars,
175 '/email_templates/alert_chart.jinja2')
@@ -23,7 +23,9 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class FlowdockAlertChannel(AlertChannel):
25 class FlowdockAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "flowdock"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'flowdock'
28 }
27
29
28 def notify_reports(self, **kwargs):
30 def notify_reports(self, **kwargs):
29 """
31 """
@@ -39,45 +41,44 b' class FlowdockAlertChannel(AlertChannel):'
39 """
41 """
40 template_vars = self.report_alert_notification_vars(kwargs)
42 template_vars = self.report_alert_notification_vars(kwargs)
41
43
42 app_url = kwargs["request"].registry.settings["_mail_url"]
44 app_url = kwargs['request'].registry.settings['_mail_url']
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
45 destination_url = kwargs['request'].route_url('/',
44 f_args = (
46 _app_url=app_url)
45 "report",
47 f_args = ('report',
46 template_vars["resource"].resource_id,
48 template_vars['resource'].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
49 )
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
52 *f_args)
51
53
52 if template_vars["confirmed_total"] > 1:
54 if template_vars['confirmed_total'] > 1:
53 template_vars["title"] = "%s - %s reports" % (
55 template_vars["title"] = "%s - %s reports" % (
54 template_vars["resource_name"],
56 template_vars['resource_name'],
55 template_vars["confirmed_total"],
57 template_vars['confirmed_total'],
56 )
58 )
57 else:
59 else:
58 error_title = truncate(
60 error_title = truncate(template_vars['reports'][0][1].error or
59 template_vars["reports"][0][1].error or "slow report", 90
61 'slow report', 90)
60 )
61 template_vars["title"] = "%s - '%s' report" % (
62 template_vars["title"] = "%s - '%s' report" % (
62 template_vars["resource_name"],
63 template_vars['resource_name'],
63 error_title,
64 error_title)
64 )
65
65
66 log_msg = "NOTIFY : %s via %s :: %s reports" % (
66 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
67 kwargs["user"].user_name,
67 kwargs['user'].user_name,
68 self.channel_visible_value,
68 self.channel_visible_value,
69 template_vars["confirmed_total"],
69 template_vars['confirmed_total'])
70 )
71 log.warning(log_msg)
70 log.warning(log_msg)
72
71
73 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
72 client = FlowdockIntegration.create_client(
73 self.integration.config['api_token'])
74 payload = {
74 payload = {
75 "source": "AppEnlight",
75 "source": "AppEnlight",
76 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
76 "from_address": kwargs['request'].registry.settings[
77 'mailing.from_email'],
77 "subject": template_vars["title"],
78 "subject": template_vars["title"],
78 "content": "New report present",
79 "content": "New report present",
79 "tags": ["appenlight"],
80 "tags": ["appenlight"],
80 "link": destination_url,
81 "link": destination_url
81 }
82 }
82 client.send_to_inbox(payload)
83 client.send_to_inbox(payload)
83
84
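
Review note: the Flowdock client posts a flat dict to the team inbox; every notification in this class reuses the shape below, varying only subject, content, tags and link. The values here are placeholders:

    payload = {
        "source": "AppEnlight",
        "from_address": "alerts@example.com",  # mailing.from_email setting
        "subject": "myapp - 3 reports",
        "content": "New report present",
        "tags": ["appenlight"],  # alert paths append "alert" plus the type
        "link": "https://appenlight.example.com/ui/report/list?resource=1",
    }
    # client = FlowdockIntegration.create_client(api_token)
    # client.send_to_inbox(payload)
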
@@ -94,30 +95,32 b' class FlowdockAlertChannel(AlertChannel):'
94 """
95 """
95 template_vars = self.report_alert_notification_vars(kwargs)
96 template_vars = self.report_alert_notification_vars(kwargs)
96
97
97 if kwargs["event"].unified_alert_action() == "OPEN":
98 if kwargs['event'].unified_alert_action() == 'OPEN':
98
99
99 title = "ALERT %s: %s - %s %s" % (
100 title = 'ALERT %s: %s - %s %s' % (
100 template_vars["alert_action"],
101 template_vars['alert_action'],
101 template_vars["resource_name"],
102 template_vars['resource_name'],
102 kwargs["event"].values["reports"],
103 kwargs['event'].values['reports'],
103 template_vars["report_type"],
104 template_vars['report_type'],
104 )
105 )
105
106
106 else:
107 else:
107 title = "ALERT %s: %s type: %s" % (
108 title = 'ALERT %s: %s type: %s' % (
108 template_vars["alert_action"],
109 template_vars['alert_action'],
109 template_vars["resource_name"],
110 template_vars['resource_name'],
110 template_vars["alert_type"].replace("_", " "),
111 template_vars['alert_type'].replace('_', ' '),
111 )
112 )
112
113
113 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
114 client = FlowdockIntegration.create_client(
115 self.integration.config['api_token'])
114 payload = {
116 payload = {
115 "source": "AppEnlight",
117 "source": "AppEnlight",
116 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
118 "from_address": kwargs['request'].registry.settings[
119 'mailing.from_email'],
117 "subject": title,
120 "subject": title,
118 "content": "Investigation required",
121 "content": 'Investigation required',
119 "tags": ["appenlight", "alert", template_vars["alert_type"]],
122 "tags": ["appenlight", "alert", template_vars['alert_type']],
120 "link": template_vars["destination_url"],
123 "link": template_vars['destination_url']
121 }
124 }
122 client.send_to_inbox(payload)
125 client.send_to_inbox(payload)
123
126
@@ -134,21 +137,23 b' class FlowdockAlertChannel(AlertChannel):'
134 """
137 """
135 template_vars = self.uptime_alert_notification_vars(kwargs)
138 template_vars = self.uptime_alert_notification_vars(kwargs)
136
139
137 message = "ALERT %s: %s has uptime issues" % (
140 message = 'ALERT %s: %s has uptime issues' % (
138 template_vars["alert_action"],
141 template_vars['alert_action'],
139 template_vars["resource_name"],
142 template_vars['resource_name'],
140 )
143 )
141 submessage = "Info: "
144 submessage = 'Info: '
142 submessage += template_vars["reason"]
145 submessage += template_vars['reason']
143
146
144 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
147 client = FlowdockIntegration.create_client(
148 self.integration.config['api_token'])
145 payload = {
149 payload = {
146 "source": "AppEnlight",
150 "source": "AppEnlight",
147 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
151 "from_address": kwargs['request'].registry.settings[
152 'mailing.from_email'],
148 "subject": message,
153 "subject": message,
149 "content": submessage,
154 "content": submessage,
150 "tags": ["appenlight", "alert", "uptime"],
155 "tags": ["appenlight", "alert", 'uptime'],
151 "link": template_vars["destination_url"],
156 "link": template_vars['destination_url']
152 }
157 }
153 client.send_to_inbox(payload)
158 client.send_to_inbox(payload)
154
159
@@ -166,29 +171,29 b' class FlowdockAlertChannel(AlertChannel):'
166 """
171 """
167 template_vars = self.report_alert_notification_vars(kwargs)
172 template_vars = self.report_alert_notification_vars(kwargs)
168 message = "Daily report digest: %s - %s reports" % (
173 message = "Daily report digest: %s - %s reports" % (
169 template_vars["resource_name"],
174 template_vars['resource_name'], template_vars['confirmed_total'])
170 template_vars["confirmed_total"],
171 )
172
175
173 f_args = (template_vars["confirmed_total"], template_vars["timestamp"])
176 f_args = (template_vars['confirmed_total'],
177 template_vars['timestamp'])
174
178
175 payload = {
179 payload = {
176 "source": "AppEnlight",
180 "source": "AppEnlight",
177 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
181 "from_address": kwargs['request'].registry.settings[
182 'mailing.from_email'],
178 "subject": message,
183 "subject": message,
179 "content": "%s reports in total since %s" % f_args,
184 "content": '%s reports in total since %s' % f_args,
180 "tags": ["appenlight", "digest"],
185 "tags": ["appenlight", "digest"],
181 "link": template_vars["destination_url"],
186 "link": template_vars['destination_url']
182 }
187 }
183
188
184 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
189 client = FlowdockIntegration.create_client(
190 self.integration.config['api_token'])
185 client.send_to_inbox(payload)
191 client.send_to_inbox(payload)
186
192
187 log_msg = "DIGEST : %s via %s :: %s reports" % (
193 log_msg = 'DIGEST : %s via %s :: %s reports' % (
188 kwargs["user"].user_name,
194 kwargs['user'].user_name,
189 self.channel_visible_value,
195 self.channel_visible_value,
190 template_vars["confirmed_total"],
196 template_vars['confirmed_total'])
191 )
192 log.warning(log_msg)
197 log.warning(log_msg)
193
198
194 def notify_chart_alert(self, **kwargs):
199 def notify_chart_alert(self, **kwargs):
@@ -204,22 +209,25 b' class FlowdockAlertChannel(AlertChannel):'
204 """
209 """
205 template_vars = self.chart_alert_notification_vars(kwargs)
210 template_vars = self.chart_alert_notification_vars(kwargs)
206
211
207 message = 'ALERT {}: value in "{}" chart ' 'met alert "{}" criteria'.format(
212 message = 'ALERT {}: value in "{}" chart ' \
208 template_vars["alert_action"],
213 'met alert "{}" criteria'.format(
209 template_vars["chart_name"],
214 template_vars['alert_action'],
210 template_vars["action_name"],
215 template_vars['chart_name'],
216 template_vars['action_name'],
211 )
217 )
212 submessage = "Info: "
218 submessage = 'Info: '
213 for item in template_vars["readable_values"]:
219 for item in template_vars['readable_values']:
214 submessage += "{}: {}\n".format(item["label"], item["value"])
220 submessage += '{}: {}\n'.format(item['label'], item['value'])
215
221
216 client = FlowdockIntegration.create_client(self.integration.config["api_token"])
222 client = FlowdockIntegration.create_client(
223 self.integration.config['api_token'])
217 payload = {
224 payload = {
218 "source": "AppEnlight",
225 "source": "AppEnlight",
219 "from_address": kwargs["request"].registry.settings["mailing.from_email"],
226 "from_address": kwargs['request'].registry.settings[
227 'mailing.from_email'],
220 "subject": message,
228 "subject": message,
221 "content": submessage,
229 "content": submessage,
222 "tags": ["appenlight", "alert", "chart"],
230 "tags": ["appenlight", "alert", 'chart'],
223 "link": template_vars["destination_url"],
231 "link": template_vars['destination_url']
224 }
232 }
225 client.send_to_inbox(payload)
233 client.send_to_inbox(payload)
@@ -23,7 +23,9 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class HipchatAlertChannel(AlertChannel):
25 class HipchatAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "hipchat"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'hipchat'
28 }
27
29
28 def notify_reports(self, **kwargs):
30 def notify_reports(self, **kwargs):
29 """
31 """
@@ -39,50 +41,46 b' class HipchatAlertChannel(AlertChannel):'
39 """
41 """
40 template_vars = self.report_alert_notification_vars(kwargs)
42 template_vars = self.report_alert_notification_vars(kwargs)
41
43
42 app_url = kwargs["request"].registry.settings["_mail_url"]
44 app_url = kwargs['request'].registry.settings['_mail_url']
43 destination_url = kwargs["request"].route_url("/", _app_url=app_url)
45 destination_url = kwargs['request'].route_url('/',
44 f_args = (
46 _app_url=app_url)
45 "report",
47 f_args = ('report',
46 template_vars["resource"].resource_id,
48 template_vars['resource'].resource_id,
47 template_vars["url_start_date"].strftime("%Y-%m-%dT%H:%M"),
49 template_vars['url_start_date'].strftime('%Y-%m-%dT%H:%M'),
48 template_vars["url_end_date"].strftime("%Y-%m-%dT%H:%M"),
50 template_vars['url_end_date'].strftime('%Y-%m-%dT%H:%M'))
49 )
51 destination_url += 'ui/{}?resource={}&start_date={}&end_date={}'.format(
50 destination_url += "ui/{}?resource={}&start_date={}&end_date={}".format(*f_args)
52 *f_args)
51
53
52 if template_vars["confirmed_total"] > 1:
54 if template_vars['confirmed_total'] > 1:
53 template_vars["title"] = "%s - %s reports" % (
55 template_vars["title"] = "%s - %s reports" % (
54 template_vars["resource_name"],
56 template_vars['resource_name'],
55 template_vars["confirmed_total"],
57 template_vars['confirmed_total'],
56 )
58 )
57 else:
59 else:
58 error_title = truncate(
60 error_title = truncate(template_vars['reports'][0][1].error or
59 template_vars["reports"][0][1].error or "slow report", 90
61 'slow report', 90)
60 )
61 template_vars["title"] = "%s - '%s' report" % (
62 template_vars["title"] = "%s - '%s' report" % (
62 template_vars["resource_name"],
63 template_vars['resource_name'],
63 error_title,
64 error_title)
64 )
65
65
66 template_vars["title"] += " " + destination_url
66 template_vars["title"] += ' ' + destination_url
67
67
68 log_msg = "NOTIFY : %s via %s :: %s reports" % (
68 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
69 kwargs["user"].user_name,
69 kwargs['user'].user_name,
70 self.channel_visible_value,
70 self.channel_visible_value,
71 template_vars["confirmed_total"],
71 template_vars['confirmed_total'])
72 )
73 log.warning(log_msg)
72 log.warning(log_msg)
74
73
75 client = HipchatIntegration.create_client(self.integration.config["api_token"])
74 client = HipchatIntegration.create_client(
76 for room in self.integration.config["rooms"].split(","):
75 self.integration.config['api_token'])
77 client.send(
76 for room in self.integration.config['rooms'].split(','):
78 {
77 client.send({
79 "message_format": "text",
78 "message_format": 'text',
80 "message": template_vars["title"],
79 "message": template_vars["title"],
81 "from": "AppEnlight",
80 "from": "AppEnlight",
82 "room_id": room.strip(),
81 "room_id": room.strip(),
83 "color": "yellow",
82 "color": "yellow"
84 }
83 })
85 )
86
84
87 def notify_report_alert(self, **kwargs):
85 def notify_report_alert(self, **kwargs):
88 """
86 """
@@ -97,37 +95,35 b' class HipchatAlertChannel(AlertChannel):'
97 """
95 """
98 template_vars = self.report_alert_notification_vars(kwargs)
96 template_vars = self.report_alert_notification_vars(kwargs)
99
97
100 if kwargs["event"].unified_alert_action() == "OPEN":
98 if kwargs['event'].unified_alert_action() == 'OPEN':
101
99
102 title = "ALERT %s: %s - %s %s" % (
100 title = 'ALERT %s: %s - %s %s' % (
103 template_vars["alert_action"],
101 template_vars['alert_action'],
104 template_vars["resource_name"],
102 template_vars['resource_name'],
105 kwargs["event"].values["reports"],
103 kwargs['event'].values['reports'],
106 template_vars["report_type"],
104 template_vars['report_type'],
107 )
105 )
108
106
109 else:
107 else:
110 title = "ALERT %s: %s type: %s" % (
108 title = 'ALERT %s: %s type: %s' % (
111 template_vars["alert_action"],
109 template_vars['alert_action'],
112 template_vars["resource_name"],
110 template_vars['resource_name'],
113 template_vars["alert_type"].replace("_", " "),
111 template_vars['alert_type'].replace('_', ' '),
114 )
112 )
115
113
116 title += "\n " + template_vars["destination_url"]
114 title += '\n ' + template_vars['destination_url']
117
115
118 api_token = self.integration.config["api_token"]
116 api_token = self.integration.config['api_token']
119 client = HipchatIntegration.create_client(api_token)
117 client = HipchatIntegration.create_client(api_token)
120 for room in self.integration.config["rooms"].split(","):
118 for room in self.integration.config['rooms'].split(','):
121 client.send(
119 client.send({
122 {
120 "message_format": 'text',
123 "message_format": "text",
121 "message": title,
124 "message": title,
122 "from": "AppEnlight",
125 "from": "AppEnlight",
123 "room_id": room.strip(),
126 "room_id": room.strip(),
124 "color": "red",
127 "color": "red",
125 "notify": '1'
128 "notify": "1",
126 })
129 }
130 )
131
127
132 def notify_uptime_alert(self, **kwargs):
128 def notify_uptime_alert(self, **kwargs):
133 """
129 """
@@ -142,26 +138,24 b' class HipchatAlertChannel(AlertChannel):'
142 """
138 """
143 template_vars = self.uptime_alert_notification_vars(kwargs)
139 template_vars = self.uptime_alert_notification_vars(kwargs)
144
140
145 message = "ALERT %s: %s has uptime issues\n" % (
141 message = 'ALERT %s: %s has uptime issues\n' % (
146 template_vars["alert_action"],
142 template_vars['alert_action'],
147 template_vars["resource_name"],
143 template_vars['resource_name'],
148 )
144 )
149 message += template_vars["reason"]
145 message += template_vars['reason']
150 message += "\n{}".format(template_vars["destination_url"])
146 message += '\n{}'.format(template_vars['destination_url'])
151
147
152 api_token = self.integration.config["api_token"]
148 api_token = self.integration.config['api_token']
153 client = HipchatIntegration.create_client(api_token)
149 client = HipchatIntegration.create_client(api_token)
154 for room in self.integration.config["rooms"].split(","):
150 for room in self.integration.config['rooms'].split(','):
155 client.send(
151 client.send({
156 {
152 "message_format": 'text',
157 "message_format": "text",
153 "message": message,
158 "message": message,
154 "from": "AppEnlight",
159 "from": "AppEnlight",
155 "room_id": room.strip(),
160 "room_id": room.strip(),
156 "color": "red",
161 "color": "red",
157 "notify": '1'
162 "notify": "1",
158 })
163 }
164 )
165
159
166 def notify_chart_alert(self, **kwargs):
160 def notify_chart_alert(self, **kwargs):
167 """
161 """
@@ -175,30 +169,29 b' class HipchatAlertChannel(AlertChannel):'
175
169
176 """
170 """
177 template_vars = self.chart_alert_notification_vars(kwargs)
171 template_vars = self.chart_alert_notification_vars(kwargs)
178 message = 'ALERT {}: value in "{}" chart: ' 'met alert "{}" criteria\n'.format(
172 message = 'ALERT {}: value in "{}" chart: ' \
179 template_vars["alert_action"],
173 'met alert "{}" criteria\n'.format(
180 template_vars["chart_name"],
174 template_vars['alert_action'],
181 template_vars["action_name"],
175 template_vars['chart_name'],
176 template_vars['action_name'],
182 )
177 )
183
178
184 for item in template_vars["readable_values"]:
179 for item in template_vars['readable_values']:
185 message += "{}: {}\n".format(item["label"], item["value"])
180 message += '{}: {}\n'.format(item['label'], item['value'])
186
181
187 message += template_vars["destination_url"]
182 message += template_vars['destination_url']
188
183
189 api_token = self.integration.config["api_token"]
184 api_token = self.integration.config['api_token']
190 client = HipchatIntegration.create_client(api_token)
185 client = HipchatIntegration.create_client(api_token)
191 for room in self.integration.config["rooms"].split(","):
186 for room in self.integration.config['rooms'].split(','):
192 client.send(
187 client.send({
193 {
188 "message_format": 'text',
194 "message_format": "text",
189 "message": message,
195 "message": message,
190 "from": "AppEnlight",
196 "from": "AppEnlight",
191 "room_id": room.strip(),
197 "room_id": room.strip(),
192 "color": "red",
198 "color": "red",
193 "notify": '1'
199 "notify": "1",
194 })
200 }
201 )
202
195
203 def send_digest(self, **kwargs):
196 def send_digest(self, **kwargs):
204 """
197 """
@@ -213,26 +206,24 b' class HipchatAlertChannel(AlertChannel):'
213
206
214 """
207 """
215 template_vars = self.report_alert_notification_vars(kwargs)
208 template_vars = self.report_alert_notification_vars(kwargs)
216 f_args = (template_vars["resource_name"], template_vars["confirmed_total"])
209 f_args = (template_vars['resource_name'],
210 template_vars['confirmed_total'],)
217 message = "Daily report digest: %s - %s reports" % f_args
211 message = "Daily report digest: %s - %s reports" % f_args
218 message += "\n{}".format(template_vars["destination_url"])
212 message += '\n{}'.format(template_vars['destination_url'])
219 api_token = self.integration.config["api_token"]
213 api_token = self.integration.config['api_token']
220 client = HipchatIntegration.create_client(api_token)
214 client = HipchatIntegration.create_client(api_token)
221 for room in self.integration.config["rooms"].split(","):
215 for room in self.integration.config['rooms'].split(','):
222 client.send(
216 client.send({
223 {
217 "message_format": 'text',
224 "message_format": "text",
218 "message": message,
225 "message": message,
219 "from": "AppEnlight",
226 "from": "AppEnlight",
220 "room_id": room.strip(),
227 "room_id": room.strip(),
221 "color": "green",
228 "color": "green",
222 "notify": '1'
229 "notify": "1",
223 })
230 }
224
231 )
225 log_msg = 'DIGEST : %s via %s :: %s reports' % (
232
226 kwargs['user'].user_name,
233 log_msg = "DIGEST : %s via %s :: %s reports" % (
234 kwargs["user"].user_name,
235 self.channel_visible_value,
227 self.channel_visible_value,
236 template_vars["confirmed_total"],
228 template_vars['confirmed_total'])
237 )
238 log.warning(log_msg)
229 log.warning(log_msg)
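
Review note: all four HipChat paths in this class repeat the same send loop and vary only the message, the color (yellow for notifications, red for alerts, green for digests) and the notify flag. A sketch of the shared shape; the helper name is mine, and the plain notification path above actually omits the notify key entirely:

    def broadcast(client, rooms, message, color, notify=True):
        for room in rooms.split(','):
            client.send({
                "message_format": "text",
                "message": message,
                "from": "AppEnlight",
                "room_id": room.strip(),
                "color": color,
                "notify": "1" if notify else "0",
            })
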
@@ -23,7 +23,9 b' log = logging.getLogger(__name__)'
23
23
24
24
25 class SlackAlertChannel(AlertChannel):
25 class SlackAlertChannel(AlertChannel):
26 __mapper_args__ = {"polymorphic_identity": "slack"}
26 __mapper_args__ = {
27 'polymorphic_identity': 'slack'
28 }
27
29
28 def notify_reports(self, **kwargs):
30 def notify_reports(self, **kwargs):
29 """
31 """
@@ -38,40 +40,45 b' class SlackAlertChannel(AlertChannel):'
38
40
39 """
41 """
40 template_vars = self.report_alert_notification_vars(kwargs)
42 template_vars = self.report_alert_notification_vars(kwargs)
41 template_vars["title"] = template_vars["resource_name"]
43 template_vars["title"] = template_vars['resource_name']
42
44
43 if template_vars["confirmed_total"] > 1:
45 if template_vars['confirmed_total'] > 1:
44 template_vars["subtext"] = "%s reports" % template_vars["confirmed_total"]
46 template_vars['subtext'] = '%s reports' % template_vars[
47 'confirmed_total']
45 else:
48 else:
46 error_title = truncate(
49 error_title = truncate(template_vars['reports'][0][1].error or
47 template_vars["reports"][0][1].error or "slow report", 90
50 'slow report', 90)
48 )
51 template_vars['subtext'] = error_title
49 template_vars["subtext"] = error_title
50
52
51 log_msg = "NOTIFY : %s via %s :: %s reports" % (
53 log_msg = 'NOTIFY : %s via %s :: %s reports' % (
52 kwargs["user"].user_name,
54 kwargs['user'].user_name,
53 self.channel_visible_value,
55 self.channel_visible_value,
54 template_vars["confirmed_total"],
56 template_vars['confirmed_total'])
55 )
56 log.warning(log_msg)
57 log.warning(log_msg)
57
58
58 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
59 client = SlackIntegration.create_client(
60 self.integration.config['webhook_url'])
59 report_data = {
61 report_data = {
60 "username": "AppEnlight",
62 "username": "AppEnlight",
61 "icon_emoji": ":fire:",
63 "icon_emoji": ":fire:",
62 "attachments": [
64 "attachments": [
63 {
65 {
64 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
66 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
65 "fallback": "*%s* - <%s| Browse>"
67 "fallback": "*%s* - <%s| Browse>" % (
66 % (template_vars["title"], template_vars["destination_url"]),
68 template_vars["title"],
67 "pretext": "*%s* - <%s| Browse>"
69 template_vars['destination_url']),
68 % (template_vars["title"], template_vars["destination_url"]),
70 "pretext": "*%s* - <%s| Browse>" % (
71 template_vars["title"],
72 template_vars['destination_url']),
69 "color": "warning",
73 "color": "warning",
70 "fields": [
74 "fields": [
71 {"value": "Info: %s" % template_vars["subtext"], "short": False}
75 {
72 ],
76 "value": 'Info: %s' % template_vars['subtext'],
77 "short": False
78 }
79 ]
73 }
80 }
74 ],
81 ]
75 }
82 }
76 client.make_request(data=report_data)
83 client.make_request(data=report_data)
77
84
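
Review note: Slack delivery above is a single incoming-webhook POST; the attachment enables markdown for the listed fields and carries the destination link in Slack's "<url| label>" syntax inside fallback and pretext. A minimal instance with placeholder values:

    report_data = {
        "username": "AppEnlight",
        "icon_emoji": ":fire:",
        "attachments": [{
            "mrkdwn_in": ["text", "pretext", "title", "fallback"],
            "fallback": "*myapp - 3 reports* - <https://appenlight.example.com| Browse>",
            "pretext": "*myapp - 3 reports* - <https://appenlight.example.com| Browse>",
            "color": "warning",
            "fields": [{"value": "Info: 3 reports", "short": False}],
        }],
    }
    # client = SlackIntegration.create_client(webhook_url)
    # client.make_request(data=report_data)
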
@@ -88,51 +95,53 b' class SlackAlertChannel(AlertChannel):'
88 """
95 """
89 template_vars = self.report_alert_notification_vars(kwargs)
96 template_vars = self.report_alert_notification_vars(kwargs)
90
97
91 if kwargs["event"].unified_alert_action() == "OPEN":
98 if kwargs['event'].unified_alert_action() == 'OPEN':
92 title = "*ALERT %s*: %s" % (
99 title = '*ALERT %s*: %s' % (
93 template_vars["alert_action"],
100 template_vars['alert_action'],
94 template_vars["resource_name"],
101 template_vars['resource_name']
95 )
102 )
96
103
97 template_vars["subtext"] = "Got at least %s %s" % (
104 template_vars['subtext'] = 'Got at least %s %s' % (
98 kwargs["event"].values["reports"],
105 kwargs['event'].values['reports'],
99 template_vars["report_type"],
106 template_vars['report_type']
100 )
107 )
101
108
102 else:
109 else:
103 title = "*ALERT %s*: %s" % (
110 title = '*ALERT %s*: %s' % (
104 template_vars["alert_action"],
111 template_vars['alert_action'],
105 template_vars["resource_name"],
112 template_vars['resource_name'],
106 )
113 )
107
114
108 template_vars["subtext"] = ""
115 template_vars['subtext'] = ''
109
116
110 alert_type = template_vars["alert_type"].replace("_", " ")
117 alert_type = template_vars['alert_type'].replace('_', ' ')
111 alert_type = alert_type.replace("alert", "").capitalize()
118 alert_type = alert_type.replace('alert', '').capitalize()
112
119
113 template_vars["type"] = "Type: %s" % alert_type
120 template_vars['type'] = "Type: %s" % alert_type
114
121
115 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
122 client = SlackIntegration.create_client(
123 self.integration.config['webhook_url']
124 )
116 report_data = {
125 report_data = {
117 "username": "AppEnlight",
126 "username": "AppEnlight",
118 "icon_emoji": ":rage:",
127 "icon_emoji": ":rage:",
119 "attachments": [
128 "attachments": [
120 {
129 {
121 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
130 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
122 "fallback": "%s - <%s| Browse>"
131 "fallback": "%s - <%s| Browse>" % (
123 % (title, template_vars["destination_url"]),
132 title, template_vars['destination_url']),
124 "pretext": "%s - <%s| Browse>"
133 "pretext": "%s - <%s| Browse>" % (
125 % (title, template_vars["destination_url"]),
134 title, template_vars['destination_url']),
126 "color": "danger",
135 "color": "danger",
127 "fields": [
136 "fields": [
128 {
137 {
129 "title": template_vars["type"],
138 "title": template_vars['type'],
130 "value": template_vars["subtext"],
139 "value": template_vars['subtext'],
131 "short": False,
140 "short": False
132 }
141 }
133 ],
142 ]
134 }
143 }
135 ],
144 ]
136 }
145 }
137 client.make_request(data=report_data)
146 client.make_request(data=report_data)
138
147
@@ -149,11 +158,13 b' class SlackAlertChannel(AlertChannel):'
149 """
158 """
150 template_vars = self.uptime_alert_notification_vars(kwargs)
159 template_vars = self.uptime_alert_notification_vars(kwargs)
151
160
152 title = "*ALERT %s*: %s" % (
161 title = '*ALERT %s*: %s' % (
153 template_vars["alert_action"],
162 template_vars['alert_action'],
154 template_vars["resource_name"],
163 template_vars['resource_name'],
164 )
165 client = SlackIntegration.create_client(
166 self.integration.config['webhook_url']
155 )
167 )
156 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
157 report_data = {
168 report_data = {
158 "username": "AppEnlight",
169 "username": "AppEnlight",
159 "icon_emoji": ":rage:",
170 "icon_emoji": ":rage:",
@@ -161,21 +172,19 b' class SlackAlertChannel(AlertChannel):'
161 {
172 {
162 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
173 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
163 "fallback": "{} - <{}| Browse>".format(
174 "fallback": "{} - <{}| Browse>".format(
164 title, template_vars["destination_url"]
175 title, template_vars['destination_url']),
165 ),
166 "pretext": "{} - <{}| Browse>".format(
176 "pretext": "{} - <{}| Browse>".format(
167 title, template_vars["destination_url"]
177 title, template_vars['destination_url']),
168 ),
169 "color": "danger",
178 "color": "danger",
170 "fields": [
179 "fields": [
171 {
180 {
172 "title": "Application has uptime issues",
181 "title": "Application has uptime issues",
173 "value": template_vars["reason"],
182 "value": template_vars['reason'],
174 "short": False,
183 "short": False
175 }
184 }
176 ],
185 ]
177 }
186 }
178 ],
187 ]
179 }
188 }
180 client.make_request(data=report_data)
189 client.make_request(data=report_data)
181
190
@@ -192,39 +201,39 b' class SlackAlertChannel(AlertChannel):'
192 """
201 """
193 template_vars = self.chart_alert_notification_vars(kwargs)
202 template_vars = self.chart_alert_notification_vars(kwargs)
194
203
195 title = '*ALERT {}*: value in *"{}"* chart ' 'met alert *"{}"* criteria'.format(
196     template_vars["alert_action"],
197     template_vars["chart_name"],
198     template_vars["action_name"],
199 )
204 title = '*ALERT {}*: value in *"{}"* chart ' \
205     'met alert *"{}"* criteria'.format(
206         template_vars['alert_action'],
207         template_vars['chart_name'],
208         template_vars['action_name'],
209 )
200
210
201 subtext = ""
211 subtext = ''
202 for item in template_vars["readable_values"]:
212 for item in template_vars['readable_values']:
203 subtext += "{} - {}\n".format(item["label"], item["value"])
213 subtext += '{} - {}\n'.format(item['label'], item['value'])
204
214
205 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
215 client = SlackIntegration.create_client(
216 self.integration.config['webhook_url']
217 )
206 report_data = {
218 report_data = {
207 "username": "AppEnlight",
219 "username": "AppEnlight",
208 "icon_emoji": ":rage:",
220 "icon_emoji": ":rage:",
209 "attachments": [
221 "attachments": [
210 {
211     "mrkdwn_in": ["text", "pretext", "title", "fallback"],
212     "fallback": "{} - <{}| Browse>".format(
213         title, template_vars["destination_url"]
214     ),
215     "pretext": "{} - <{}| Browse>".format(
216         title, template_vars["destination_url"]
217     ),
218     "color": "danger",
219     "fields": [
220         {
221             "title": "Following criteria were met:",
222             "value": subtext,
223             "short": False,
224         }
225     ],
226 }
227 ],
228 }
222 {"mrkdwn_in": ["text", "pretext", "title", "fallback"],
223     "fallback": "{} - <{}| Browse>".format(
224         title, template_vars['destination_url']),
225     "pretext": "{} - <{}| Browse>".format(
226         title, template_vars['destination_url']),
227     "color": "danger",
228     "fields": [
229         {
230             "title": "Following criteria were met:",
231             "value": subtext,
232             "short": False
233         }
234     ]
235 }
236 ]
237 }
229 client.make_request(data=report_data)
238 client.make_request(data=report_data)
230
239
@@ -241,30 +250,36 b' class SlackAlertChannel(AlertChannel):'
241
250
242 """
251 """
243 template_vars = self.report_alert_notification_vars(kwargs)
252 template_vars = self.report_alert_notification_vars(kwargs)
244 title = "*Daily report digest*: %s" % template_vars["resource_name"]
253 title = "*Daily report digest*: %s" % template_vars['resource_name']
245
254
246 subtext = "%s reports" % template_vars["confirmed_total"]
255 subtext = '%s reports' % template_vars['confirmed_total']
247
256
248 client = SlackIntegration.create_client(self.integration.config["webhook_url"])
257 client = SlackIntegration.create_client(
258 self.integration.config['webhook_url']
259 )
249 report_data = {
260 report_data = {
250 "username": "AppEnlight",
261 "username": "AppEnlight",
251 "attachments": [
262 "attachments": [
252 {
263 {
253 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
264 "mrkdwn_in": ["text", "pretext", "title", "fallback"],
254 "fallback": "%s : <%s| Browse>"
265 "fallback": "%s : <%s| Browse>" % (
255 % (title, template_vars["destination_url"]),
266 title, template_vars['destination_url']),
256 "pretext": "%s: <%s| Browse>"
267 "pretext": "%s: <%s| Browse>" % (
257 % (title, template_vars["destination_url"]),
268 title, template_vars['destination_url']),
258 "color": "good",
269 "color": "good",
259 "fields": [{"title": "Got at least: %s" % subtext, "short": False}],
270 "fields": [
271 {
272 "title": "Got at least: %s" % subtext,
273 "short": False
274 }
275 ]
260 }
276 }
261 ],
277 ]
262 }
278 }
263 client.make_request(data=report_data)
279 client.make_request(data=report_data)
264
280
265 log_msg = "DIGEST : %s via %s :: %s reports" % (
281 log_msg = 'DIGEST : %s via %s :: %s reports' % (
266 kwargs["user"].user_name,
282 kwargs['user'].user_name,
267 self.channel_visible_value,
283 self.channel_visible_value,
268 template_vars["confirmed_total"],
284 template_vars['confirmed_total'])
269 )
270 log.warning(log_msg)
285 log.warning(log_msg)
@@ -24,7 +24,7 b' log = logging.getLogger(__name__)'
24
24
25
25
26 def generate_api_key():
26 def generate_api_key():
27 uid = str(uuid.uuid4()).replace("-", "")
27 uid = str(uuid.uuid4()).replace('-', '')
28 return uid[0:32]
28 return uid[0:32]
29
29
30
30
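Both sides keep the same key scheme: a UUID4 with the dashes stripped, truncated to 32 hex characters. A quick illustration of what generate_api_key() returns:

    import uuid

    uid = str(uuid.uuid4()).replace('-', '')
    api_key = uid[0:32]
    assert len(api_key) == 32
    assert all(c in '0123456789abcdef' for c in api_key)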
@@ -33,69 +33,61 b' class Application(Resource):'
33 Resource of application type
33 Resource of application type
34 """
34 """
35
35
36 __tablename__ = "applications"
36 __tablename__ = 'applications'
37 __mapper_args__ = {"polymorphic_identity": "application"}
37 __mapper_args__ = {'polymorphic_identity': 'application'}
38
38
39 # lists configurable possible permissions for this resource type
39 # lists configurable possible permissions for this resource type
40 __possible_permissions__ = ("view", "update_reports")
40 __possible_permissions__ = ('view', 'update_reports')
41
41
42 resource_id = sa.Column(
43     sa.Integer(),
44     sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
45     primary_key=True,
46 )
42 resource_id = sa.Column(sa.Integer(),
43     sa.ForeignKey('resources.resource_id',
44         onupdate='CASCADE',
45         ondelete='CASCADE', ),
46     primary_key=True, )
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default="")
47 domains = sa.Column(sa.UnicodeText(), nullable=False, default='')
48 api_key = sa.Column(
49     sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
50 )
48 api_key = sa.Column(sa.String(32), nullable=False, unique=True, index=True,
49     default=generate_api_key)
51 public_key = sa.Column(
52     sa.String(32), nullable=False, unique=True, index=True, default=generate_api_key
53 )
50 public_key = sa.Column(sa.String(32), nullable=False, unique=True,
51     index=True,
52     default=generate_api_key)
54 default_grouping = sa.Column(
55     sa.Unicode(20), nullable=False, default="url_traceback"
56 )
53 default_grouping = sa.Column(sa.Unicode(20), nullable=False,
54     default='url_traceback')
57 error_report_threshold = sa.Column(sa.Integer(), default=10)
55 error_report_threshold = sa.Column(sa.Integer(), default=10)
58 slow_report_threshold = sa.Column(sa.Integer(), default=10)
56 slow_report_threshold = sa.Column(sa.Integer(), default=10)
59 allow_permanent_storage = sa.Column(sa.Boolean(), default=False, nullable=False)
57 allow_permanent_storage = sa.Column(sa.Boolean(), default=False,
58 nullable=False)
60
59
61 @sa.orm.validates("default_grouping")
60 @sa.orm.validates('default_grouping')
62 def validate_default_grouping(self, key, grouping):
61 def validate_default_grouping(self, key, grouping):
63 """ validate if resouce can have specific permission """
62 """ validate if resouce can have specific permission """
64 assert grouping in ["url_type", "url_traceback", "traceback_server"]
63 assert grouping in ['url_type', 'url_traceback', 'traceback_server']
65 return grouping
64 return grouping
66
65
67 report_groups = sa.orm.relationship(
68     "ReportGroup",
69     cascade="all, delete-orphan",
70     passive_deletes=True,
71     passive_updates=True,
72     lazy="dynamic",
73     backref=sa.orm.backref("application", lazy="joined"),
74 )
66 report_groups = sa.orm.relationship('ReportGroup',
67     cascade="all, delete-orphan",
68     passive_deletes=True,
69     passive_updates=True,
70     lazy='dynamic',
71     backref=sa.orm.backref('application',
72         lazy="joined"))
75
73
76 postprocess_conf = sa.orm.relationship(
77     "ApplicationPostprocessConf",
78     cascade="all, delete-orphan",
79     passive_deletes=True,
80     passive_updates=True,
81     backref="resource",
82 )
74 postprocess_conf = sa.orm.relationship('ApplicationPostprocessConf',
75     cascade="all, delete-orphan",
76     passive_deletes=True,
77     passive_updates=True,
78     backref='resource')
83
79
84 logs = sa.orm.relationship(
85     "Log",
86     lazy="dynamic",
87     backref="application",
88     passive_deletes=True,
89     passive_updates=True,
90 )
80 logs = sa.orm.relationship('Log',
81     lazy='dynamic',
82     backref='application',
83     passive_deletes=True,
84     passive_updates=True, )
91
85
92 integrations = sa.orm.relationship(
93     "IntegrationBase",
94     backref="resource",
95     cascade="all, delete-orphan",
96     passive_deletes=True,
97     passive_updates=True,
98 )
86 integrations = sa.orm.relationship('IntegrationBase',
87     backref='resource',
88     cascade="all, delete-orphan",
89     passive_deletes=True,
90     passive_updates=True, )
99
91
100 def generate_api_key(self):
92 def generate_api_key(self):
101 return generate_api_key()
93 return generate_api_key()
@@ -103,11 +95,10 b' class Application(Resource):'
103
95
104 def after_update(mapper, connection, target):
96 def after_update(mapper, connection, target):
105 from appenlight.models.services.application import ApplicationService
97 from appenlight.models.services.application import ApplicationService
106
98 log.info('clearing out ApplicationService cache')
107 log.info("clearing out ApplicationService cache")
108 ApplicationService.by_id_cached().invalidate(target.resource_id)
99 ApplicationService.by_id_cached().invalidate(target.resource_id)
109 ApplicationService.by_api_key_cached().invalidate(target.api_key)
100 ApplicationService.by_api_key_cached().invalidate(target.api_key)
110
101
111
102
112 sa.event.listen(Application, "after_update", after_update)
103 sa.event.listen(Application, 'after_update', after_update)
113 sa.event.listen(Application, "after_delete", after_update)
104 sa.event.listen(Application, 'after_delete', after_update)
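The @sa.orm.validates('default_grouping') hook runs on every assignment to the attribute, so an invalid grouping fails fast at the model layer rather than in the database. A sketch of the behavior, assuming a bare Application() can be constructed:

    app = Application()
    app.default_grouping = 'url_traceback'   # accepted, validator returns it
    try:
        app.default_grouping = 'nonsense'    # validator raises AssertionError
    except AssertionError:
        pass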
@@ -27,20 +27,18 b' class ApplicationPostprocessConf(Base, BaseModel):'
27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
27 This is later used for rule parsing like "if 10 occurrences bump priority +1"
28 """
28 """
29
29
30 __tablename__ = "application_postprocess_conf"
30 __tablename__ = 'application_postprocess_conf'
31
31
32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
32 pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True)
33 resource_id = sa.Column(
34     sa.Integer(),
35     sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
36 )
33 resource_id = sa.Column(sa.Integer(),
34     sa.ForeignKey('resources.resource_id',
35         onupdate='CASCADE',
36         ondelete='CASCADE'))
37 do = sa.Column(sa.Unicode(25), nullable=False)
37 do = sa.Column(sa.Unicode(25), nullable=False)
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default="")
38 new_value = sa.Column(sa.UnicodeText(), nullable=False, default='')
39 rule = sa.Column(
40     sa.dialects.postgresql.JSON,
41     nullable=False,
42     default={"field": "http_status", "op": "ge", "value": "500"},
43 )
39 rule = sa.Column(sa.dialects.postgresql.JSON,
40     nullable=False, default={'field': 'http_status',
41         "op": "ge", "value": "500"})
44
42
45 def postprocess(self, item):
43 def postprocess(self, item):
46 new_value = int(self.new_value)
44 new_value = int(self.new_value)
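The default rule means "apply this postprocess action when the report's http_status is >= 500". The actual rule engine lives elsewhere in appenlight; a hypothetical evaluation of the default rule against a report dict might look like:

    import operator

    rule = {'field': 'http_status', 'op': 'ge', 'value': '500'}
    report = {'http_status': 502}

    op_map = {'ge': operator.ge, 'le': operator.le, 'eq': operator.eq}  # assumed ops
    matches = op_map[rule['op']](report[rule['field']], int(rule['value']))
    assert matches is True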
@@ -29,22 +29,17 b' class AuthToken(Base, BaseModel):'
29 """
29 """
30 Stores information about possible alerting options
30 Stores information about possible alerting options
31 """
31 """
32
32 __tablename__ = 'auth_tokens'
33 __tablename__ = "auth_tokens"
34
33
35 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
34 id = sa.Column(sa.Integer, primary_key=True, nullable=False)
36 token = sa.Column(
37     sa.Unicode(40),
38     nullable=False,
39     default=lambda x: UserService.generate_random_string(40),
40 )
35 token = sa.Column(sa.Unicode(40), nullable=False,
36     default=lambda x: UserService.generate_random_string(40))
41 owner_id = sa.Column(
42     sa.Unicode(30),
43     sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
44 )
37 owner_id = sa.Column(sa.Unicode(30),
38     sa.ForeignKey('users.id', onupdate='CASCADE',
39         ondelete='CASCADE'))
45 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
40 creation_date = sa.Column(sa.DateTime, default=lambda x: datetime.utcnow())
46 expires = sa.Column(sa.DateTime)
41 expires = sa.Column(sa.DateTime)
47 description = sa.Column(sa.Unicode, default="")
42 description = sa.Column(sa.Unicode, default='')
48
43
49 @property
44 @property
50 def is_expired(self):
45 def is_expired(self):
@@ -54,4 +49,4 b' class AuthToken(Base, BaseModel):'
54 return False
49 return False
55
50
56 def __str__(self):
51 def __str__(self):
57 return "<AuthToken u:%s t:%s...>" % (self.owner_id, self.token[0:10])
52 return '<AuthToken u:%s t:%s...>' % (self.owner_id, self.token[0:10])
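The body of is_expired is elided by the diff (only the return False fallback is visible). Presumably it compares expires against the current UTC time when set; a sketch of that assumed check:

    from datetime import datetime, timedelta

    expires = datetime.utcnow() + timedelta(days=30)
    # assumed logic: expired only when an expiry is set and already passed
    is_expired = expires is not None and expires < datetime.utcnow()
    assert is_expired is False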
@@ -16,13 +16,13 b''
16
16
17 import sqlalchemy as sa
17 import sqlalchemy as sa
18 from ziggurat_foundations.models.base import BaseModel
18 from ziggurat_foundations.models.base import BaseModel
19 from sqlalchemy.dialects.postgresql import JSON
19 from sqlalchemy.dialects.postgres import JSON
20
20
21 from . import Base
21 from . import Base
22
22
23
23
24 class Config(Base, BaseModel):
24 class Config(Base, BaseModel):
25 __tablename__ = "config"
25 __tablename__ = 'config'
26
26
27 key = sa.Column(sa.Unicode, primary_key=True)
27 key = sa.Column(sa.Unicode, primary_key=True)
28 section = sa.Column(sa.Unicode, primary_key=True)
28 section = sa.Column(sa.Unicode, primary_key=True)
@@ -20,50 +20,49 b' import logging'
20 from datetime import datetime
20 from datetime import datetime
21 from appenlight.models import Base, get_db_session
21 from appenlight.models import Base, get_db_session
22 from appenlight.models.services.report_stat import ReportStatService
22 from appenlight.models.services.report_stat import ReportStatService
23 from appenlight.models.resource import Resource
23 from appenlight.models.integrations import IntegrationException
24 from appenlight.models.integrations import IntegrationException
24 from pyramid.threadlocal import get_current_request
25 from pyramid.threadlocal import get_current_request
25 from sqlalchemy.dialects.postgresql import JSON
26 from sqlalchemy.dialects.postgresql import JSON
26 from ziggurat_foundations.models.base import BaseModel
27 from ziggurat_foundations.models.base import BaseModel
27 from ziggurat_foundations.models.services.resource import ResourceService
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 class Event(Base, BaseModel):
32 class Event(Base, BaseModel):
33 __tablename__ = "events"
33 __tablename__ = 'events'
34
34
35 types = {
36     "error_report_alert": 1,
37     "slow_report_alert": 3,
38     "comment": 5,
39     "assignment": 6,
40     "uptime_alert": 7,
41     "chart_alert": 9,
42 }
35 types = {'error_report_alert': 1,
36     'slow_report_alert': 3,
37     'comment': 5,
38     'assignment': 6,
39     'uptime_alert': 7,
40     'chart_alert': 9}
43
41
44 statuses = {"active": 1, "closed": 0}
42 statuses = {'active': 1,
43     'closed': 0}
45
44
46 id = sa.Column(sa.Integer, primary_key=True)
45 id = sa.Column(sa.Integer, primary_key=True)
47 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
46 start_date = sa.Column(sa.DateTime, default=datetime.utcnow)
48 end_date = sa.Column(sa.DateTime)
47 end_date = sa.Column(sa.DateTime)
49 status = sa.Column(sa.Integer, default=1)
48 status = sa.Column(sa.Integer, default=1)
50 event_type = sa.Column(sa.Integer, default=1)
49 event_type = sa.Column(sa.Integer, default=1)
51 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
50 origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
51     nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey("users.id"), nullable=True)
52 target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'),
53     nullable=True)
53 resource_id = sa.Column(
54     sa.Integer(), sa.ForeignKey("resources.resource_id"), nullable=True
55 )
54 resource_id = sa.Column(sa.Integer(),
55     sa.ForeignKey('resources.resource_id'),
56     nullable=True)
56 target_id = sa.Column(sa.Integer)
57 target_id = sa.Column(sa.Integer)
57 target_uuid = sa.Column(sa.Unicode(40))
58 target_uuid = sa.Column(sa.Unicode(40))
58 text = sa.Column(sa.UnicodeText())
59 text = sa.Column(sa.UnicodeText())
59 values = sa.Column(JSON(), nullable=False, default=None)
60 values = sa.Column(JSON(), nullable=False, default=None)
60
61
61 def __repr__(self):
62 def __repr__(self):
62 return "<Event %s, app:%s, %s>" % (
63 return '<Event %s, app:%s, %s>' % (self.unified_alert_name(),
63 self.unified_alert_name(),
64 self.resource_id,
64 self.resource_id,
65 self.unified_alert_action())
65 self.unified_alert_action(),
66 )
67
66
68 @property
67 @property
69 def reverse_types(self):
68 def reverse_types(self):
@@ -74,9 +73,9 b' class Event(Base, BaseModel):'
74
73
75 def unified_alert_action(self):
74 def unified_alert_action(self):
76 event_name = self.reverse_types[self.event_type]
75 event_name = self.reverse_types[self.event_type]
77 if self.status == Event.statuses["closed"]:
76 if self.status == Event.statuses['closed']:
78 return "CLOSE"
77 return "CLOSE"
79 if self.status != Event.statuses["closed"]:
78 if self.status != Event.statuses['closed']:
80 return "OPEN"
79 return "OPEN"
81 return event_name
80 return event_name
82
81
@@ -85,51 +84,42 b' class Event(Base, BaseModel):'
85 db_session = get_db_session(db_session)
84 db_session = get_db_session(db_session)
86 db_session.flush()
85 db_session.flush()
87 if not resource:
86 if not resource:
88 resource = ResourceService.by_resource_id(self.resource_id)
87 resource = Resource.by_resource_id(self.resource_id)
89 if not request:
88 if not request:
90 request = get_current_request()
89 request = get_current_request()
91 if not resource:
90 if not resource:
92 return
91 return
93 users = set([p.user for p in ResourceService.users_for_perm(resource, "view")])
92 users = set([p.user for p in resource.users_for_perm('view')])
94 for user in users:
93 for user in users:
95 for channel in user.alert_channels:
94 for channel in user.alert_channels:
96 matches_resource = not channel.resources or resource in [
97     r.resource_id for r in channel.resources
98 ]
95 matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources]
99 if (
96 if (
100 not channel.channel_validated
97 not channel.channel_validated or
101 or not channel.send_alerts
98 not channel.send_alerts or
102 or not matches_resource
99 not matches_resource
103 ):
100 ):
104 continue
101 continue
105 else:
102 else:
106 try:
103 try:
107 channel.notify_alert(
108     resource=resource, event=self, user=user, request=request
109 )
104 channel.notify_alert(resource=resource,
105     event=self,
106     user=user,
107     request=request)
110 except IntegrationException as e:
108 except IntegrationException as e:
111 log.warning("%s" % e)
109 log.warning('%s' % e)
112
110
113 def validate_or_close(self, since_when, db_session=None):
111 def validate_or_close(self, since_when, db_session=None):
114 """ Checks if alerts should stay open or it's time to close them.
112 """ Checks if alerts should stay open or it's time to close them.
115 Generates close alert event if alerts get closed """
113 Generates close alert event if alerts get closed """
116 event_types = [
117     Event.types["error_report_alert"],
118     Event.types["slow_report_alert"],
119 ]
114 event_types = [Event.types['error_report_alert'],
115     Event.types['slow_report_alert']]
120 app = ResourceService.by_resource_id(self.resource_id)
116 app = Resource.by_resource_id(self.resource_id)
121 # if app was deleted close instantly
122 if not app:
123     self.close()
124     return
125
126 if self.event_type in event_types:
117 if self.event_type in event_types:
127     total = ReportStatService.count_by_type(
128         self.event_type, self.resource_id, since_when
129     )
118     total = ReportStatService.count_by_type(
119         self.event_type, self.resource_id, since_when)
120 if Event.types['error_report_alert'] == self.event_type:
130 if Event.types["error_report_alert"] == self.event_type:
131 threshold = app.error_report_threshold
121 threshold = app.error_report_threshold
132 if Event.types["slow_report_alert"] == self.event_type:
122 if Event.types['slow_report_alert'] == self.event_type:
133 threshold = app.slow_report_threshold
123 threshold = app.slow_report_threshold
134
124
135 if total < threshold:
125 if total < threshold:
@@ -140,31 +130,31 b' class Event(Base, BaseModel):'
140 Closes an event and sends notification to affected users
130 Closes an event and sends notification to affected users
141 """
131 """
142 self.end_date = datetime.utcnow()
132 self.end_date = datetime.utcnow()
143 self.status = Event.statuses["closed"]
133 self.status = Event.statuses['closed']
144 log.warning("ALERT: CLOSE: %s" % self)
134 log.warning('ALERT: CLOSE: %s' % self)
145 self.send_alerts()
135 self.send_alerts()
146
136
147 def text_representation(self):
137 def text_representation(self):
148 alert_type = self.unified_alert_name()
138 alert_type = self.unified_alert_name()
149 text = ""
139 text = ''
150 if "slow_report" in alert_type:
140 if 'slow_report' in alert_type:
151 text += "Slow report alert"
141 text += 'Slow report alert'
152 if "error_report" in alert_type:
142 if 'error_report' in alert_type:
153 text += "Exception report alert"
143 text += 'Exception report alert'
154 if "uptime_alert" in alert_type:
144 if 'uptime_alert' in alert_type:
155 text += "Uptime alert"
145 text += 'Uptime alert'
156 if "chart_alert" in alert_type:
146 if 'chart_alert' in alert_type:
157 text += "Metrics value alert"
147 text += 'Metrics value alert'
158
148
159 alert_action = self.unified_alert_action()
149 alert_action = self.unified_alert_action()
160 if alert_action == "OPEN":
150 if alert_action == 'OPEN':
161 text += " got opened."
151 text += ' got opened.'
162 if alert_action == "CLOSE":
152 if alert_action == 'CLOSE':
163 text += " got closed."
153 text += ' got closed.'
164 return text
154 return text
165
155
166 def get_dict(self, request=None):
156 def get_dict(self, request=None):
167 dict_data = super(Event, self).get_dict()
157 dict_data = super(Event, self).get_dict()
168 dict_data["text"] = self.text_representation()
158 dict_data['text'] = self.text_representation()
169 dict_data["resource_name"] = self.resource.resource_name
159 dict_data['resource_name'] = self.resource.resource_name
170 return dict_data
160 return dict_data
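unified_alert_action looks event names up through self.reverse_types, the inverse of the Event.types mapping declared above. A plausible one-liner for that property:

    types = {'error_report_alert': 1, 'slow_report_alert': 3, 'comment': 5,
             'assignment': 6, 'uptime_alert': 7, 'chart_alert': 9}
    reverse_types = {value: key for key, value in types.items()}
    assert reverse_types[7] == 'uptime_alert'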
@@ -25,12 +25,12 b' from appenlight.lib.sqlalchemy_fields import EncryptedUnicode'
25 class ExternalIdentity(ExternalIdentityMixin, Base):
25 class ExternalIdentity(ExternalIdentityMixin, Base):
26 @declared_attr
26 @declared_attr
27 def access_token(self):
27 def access_token(self):
28 return sa.Column(EncryptedUnicode(255), default="")
28 return sa.Column(EncryptedUnicode(255), default='')
29
29
30 @declared_attr
30 @declared_attr
31 def alt_token(self):
31 def alt_token(self):
32 return sa.Column(EncryptedUnicode(255), default="")
32 return sa.Column(EncryptedUnicode(255), default='')
33
33
34 @declared_attr
34 @declared_attr
35 def token_secret(self):
35 def token_secret(self):
36 return sa.Column(EncryptedUnicode(255), default="")
36 return sa.Column(EncryptedUnicode(255), default='')
@@ -19,28 +19,27 b' from appenlight.models import Base'
19
19
20
20
21 class Group(GroupMixin, Base):
21 class Group(GroupMixin, Base):
22 __possible_permissions__ = (
23     "root_administration",
24     "test_features",
25     "admin_panel",
26     "admin_users",
27     "manage_partitions",
28 )
22 __possible_permissions__ = ('root_administration',
23     'test_features',
24     'admin_panel',
25     'admin_users',
26     'manage_partitions',)
29
27
30 def get_dict(self, exclude_keys=None, include_keys=None, include_perms=False):
28 def get_dict(self, exclude_keys=None, include_keys=None,
29 include_perms=False):
31 result = super(Group, self).get_dict(exclude_keys, include_keys)
30 result = super(Group, self).get_dict(exclude_keys, include_keys)
32 if include_perms:
31 if include_perms:
33 result["possible_permissions"] = self.__possible_permissions__
32 result['possible_permissions'] = self.__possible_permissions__
34 result["current_permissions"] = [p.perm_name for p in self.permissions]
33 result['current_permissions'] = [p.perm_name for p in
34 self.permissions]
35 else:
35 else:
36 result["possible_permissions"] = []
36 result['possible_permissions'] = []
37 result["current_permissions"] = []
37 result['current_permissions'] = []
38 exclude_keys_list = exclude_keys or []
38 exclude_keys_list = exclude_keys or []
39 include_keys_list = include_keys or []
39 include_keys_list = include_keys or []
40 d = {}
40 d = {}
41 for k in result.keys():
41 for k in result.keys():
42 if k not in exclude_keys_list and (
42 if (k not in exclude_keys_list and
43 k in include_keys_list or not include_keys
43 (k in include_keys_list or not include_keys)):
44 ):
45 d[k] = result[k]
44 d[k] = result[k]
46 return d
45 return d
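The filtering rule is: keep a key when it is not excluded and either no include list was given or the key appears in it. A worked example with hypothetical key names:

    result = {'id': 1, 'group_name': 'admins', 'description': 'x'}
    exclude_keys_list = ['description']
    include_keys_list = []
    include_keys = None  # caller did not restrict keys

    d = {k: v for k, v in result.items()
         if k not in exclude_keys_list and (k in include_keys_list or not include_keys)}
    assert d == {'id': 1, 'group_name': 'admins'}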
@@ -14,9 +14,8 b''
14 # See the License for the specific language governing permissions and
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
15 # limitations under the License.
16
16
17 from ziggurat_foundations.models.group_resource_permission import (
17 from ziggurat_foundations.models.group_resource_permission import \
18 GroupResourcePermissionMixin,
18 GroupResourcePermissionMixin
19 )
20 from appenlight.models import Base
19 from appenlight.models import Base
21
20
22
21
@@ -32,37 +32,34 b' class IntegrationBase(Base, BaseModel):'
32 """
32 """
33 Model from which all integrations inherit using polymorphic approach
33 Model from which all integrations inherit using polymorphic approach
34 """
34 """
35
35 __tablename__ = 'integrations'
36 __tablename__ = "integrations"
37
36
38 front_visible = False
37 front_visible = False
39 as_alert_channel = False
38 as_alert_channel = False
40 supports_report_alerting = False
39 supports_report_alerting = False
41
40
42 id = sa.Column(sa.Integer, primary_key=True)
41 id = sa.Column(sa.Integer, primary_key=True)
43 resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id"))
42 resource_id = sa.Column(sa.Integer,
43 sa.ForeignKey('applications.resource_id'))
44 integration_name = sa.Column(sa.Unicode(64))
44 integration_name = sa.Column(sa.Unicode(64))
45 _config = sa.Column("config", JSON(), nullable=False, default="")
45 _config = sa.Column('config', JSON(), nullable=False, default='')
46 modified_date = sa.Column(sa.DateTime)
46 modified_date = sa.Column(sa.DateTime)
47
47
48 channel = sa.orm.relationship(
49     "AlertChannel",
50     cascade="all,delete-orphan",
51     passive_deletes=True,
52     passive_updates=True,
53     uselist=False,
54     backref="integration",
55 )
48 channel = sa.orm.relationship('AlertChannel',
49     cascade="all,delete-orphan",
50     passive_deletes=True,
51     passive_updates=True,
52     uselist=False,
53     backref='integration')
56
54
57 __mapper_args__ = {
55 __mapper_args__ = {
58 "polymorphic_on": "integration_name",
56 'polymorphic_on': 'integration_name',
59 "polymorphic_identity": "integration",
57 'polymorphic_identity': 'integration'
60 }
58 }
61
59
62 @classmethod
60 @classmethod
63 def by_app_id_and_integration_name(
64     cls, resource_id, integration_name, db_session=None
65 ):
61 def by_app_id_and_integration_name(cls, resource_id, integration_name,
62     db_session=None):
66 db_session = get_db_session(db_session)
63 db_session = get_db_session(db_session)
67 query = db_session.query(cls)
64 query = db_session.query(cls)
68 query = query.filter(cls.integration_name == integration_name)
65 query = query.filter(cls.integration_name == integration_name)
@@ -75,6 +72,7 b' class IntegrationBase(Base, BaseModel):'
75
72
76 @config.setter
73 @config.setter
77 def config(self, value):
74 def config(self, value):
78 if not hasattr(value, "items"):
75 if not hasattr(value, 'items'):
79 raise Exception("IntegrationBase.config only accepts " "flat dictionaries")
76 raise Exception('IntegrationBase.config only accepts '
77 'flat dictionaries')
80 self._config = encrypt_dictionary_keys(value)
78 self._config = encrypt_dictionary_keys(value)
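config is a hybrid of a JSON column and encryption: the setter rejects anything that is not dict-like and stores an encrypted copy in _config. A toy property pair with the same guard, the appenlight crypto helpers stubbed out:

    class TrivialIntegration(object):
        def __init__(self):
            self._config = {}

        @property
        def config(self):
            return self._config          # real getter decrypts here

        @config.setter
        def config(self, value):
            if not hasattr(value, 'items'):
                raise Exception('config only accepts flat dictionaries')
            self._config = value         # real setter encrypts here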
@@ -16,7 +16,8 b''
16
16
17 import requests
17 import requests
18 from requests_oauthlib import OAuth1
18 from requests_oauthlib import OAuth1
19 from appenlight.models.integrations import IntegrationBase, IntegrationException
19 from appenlight.models.integrations import (IntegrationBase,
20 IntegrationException)
20
21
21 _ = str
22 _ = str
22
23
@@ -26,12 +27,14 b' class NotFoundException(Exception):'
26
27
27
28
28 class BitbucketIntegration(IntegrationBase):
29 class BitbucketIntegration(IntegrationBase):
29 __mapper_args__ = {"polymorphic_identity": "bitbucket"}
30 __mapper_args__ = {
31 'polymorphic_identity': 'bitbucket'
32 }
30 front_visible = True
33 front_visible = True
31 as_alert_channel = False
34 as_alert_channel = False
32 supports_report_alerting = False
35 supports_report_alerting = False
33 action_notification = True
36 action_notification = True
34 integration_action = "Add issue to Bitbucket"
37 integration_action = 'Add issue to Bitbucket'
35
38
36 @classmethod
39 @classmethod
37 def create_client(cls, request, user_name=None, repo_name=None):
40 def create_client(cls, request, user_name=None, repo_name=None):
@@ -43,36 +46,27 b' class BitbucketIntegration(IntegrationBase):'
43 token = None
46 token = None
44 secret = None
47 secret = None
45 for identity in request.user.external_identities:
48 for identity in request.user.external_identities:
46 if identity.provider_name == "bitbucket":
49 if identity.provider_name == 'bitbucket':
47 token = identity.access_token
50 token = identity.access_token
48 secret = identity.token_secret
51 secret = identity.token_secret
49 break
52 break
50 if not token:
53 if not token:
51 raise IntegrationException("No valid auth token present for this service")
54 raise IntegrationException(
52 client = BitbucketClient(
55 'No valid auth token present for this service')
53 token,
56 client = BitbucketClient(token, secret,
54 secret,
57 user_name,
55 user_name,
58 repo_name,
56 repo_name,
59 config['authomatic.pr.bitbucket.key'],
57 config["authomatic.pr.bitbucket.key"],
60 config['authomatic.pr.bitbucket.secret'])
58 config["authomatic.pr.bitbucket.secret"],
59 )
60 return client
61 return client
61
62
62
63
63 class BitbucketClient(object):
64 class BitbucketClient(object):
64 api_url = "https://bitbucket.org/api/1.0"
65 api_url = 'https://bitbucket.org/api/1.0'
65 repo_type = "bitbucket"
66 repo_type = 'bitbucket'
66
67
67 def __init__(
68     self,
69     token,
70     secret,
71     owner,
72     repo_name,
73     bitbucket_consumer_key,
74     bitbucket_consumer_secret,
75 ):
68 def __init__(self, token, secret, owner, repo_name, bitbucket_consumer_key,
69     bitbucket_consumer_secret):
76 self.access_token = token
70 self.access_token = token
77 self.token_secret = secret
71 self.token_secret = secret
78 self.owner = owner
72 self.owner = owner
@@ -81,108 +75,89 b' class BitbucketClient(object):'
81 self.bitbucket_consumer_secret = bitbucket_consumer_secret
75 self.bitbucket_consumer_secret = bitbucket_consumer_secret
82
76
83 possible_keys = {
84     "status": [
85         "new",
86         "open",
87         "resolved",
88         "on hold",
89         "invalid",
90         "duplicate",
91         "wontfix",
92     ],
93     "priority": ["trivial", "minor", "major", "critical", "blocker"],
94     "kind": ["bug", "enhancement", "proposal", "task"],
95 }
77 possible_keys = {
78     'status': ['new', 'open', 'resolved', 'on hold', 'invalid',
79         'duplicate', 'wontfix'],
80     'priority': ['trivial', 'minor', 'major', 'critical', 'blocker'],
81     'kind': ['bug', 'enhancement', 'proposal', 'task']
82 }
96
83
97 def get_statuses(self):
84 def get_statuses(self):
98 """Gets list of possible item statuses"""
85 """Gets list of possible item statuses"""
99 return self.possible_keys["status"]
86 return self.possible_keys['status']
100
87
101 def get_priorities(self):
88 def get_priorities(self):
102 """Gets list of possible item statuses"""
89 """Gets list of possible item statuses"""
103 return self.possible_keys["priority"]
90 return self.possible_keys['priority']
104
91
105 def make_request(self, url, method="get", data=None, headers=None):
92 def make_request(self, url, method='get', data=None, headers=None):
106 """
93 """
107 Performs HTTP request to bitbucket
94 Performs HTTP request to bitbucket
108 """
95 """
109 auth = OAuth1(
110     self.bitbucket_consumer_key,
111     self.bitbucket_consumer_secret,
112     self.access_token,
113     self.token_secret,
114 )
96 auth = OAuth1(self.bitbucket_consumer_key,
97     self.bitbucket_consumer_secret,
98     self.access_token, self.token_secret)
115 try:
99 try:
116 resp = getattr(requests, method)(url, data=data, auth=auth, timeout=10)
100 resp = getattr(requests, method)(url, data=data, auth=auth,
101 timeout=10)
117 except Exception as e:
102 except Exception as e:
118 raise IntegrationException(
103 raise IntegrationException(
119 _("Error communicating with Bitbucket: %s") % (e,)
104 _('Error communicating with Bitbucket: %s') % (e,))
120 )
121 if resp.status_code == 401:
105 if resp.status_code == 401:
122 raise IntegrationException(_("You are not authorized to access this repo"))
106 raise IntegrationException(
107 _('You are not authorized to access this repo'))
123 elif resp.status_code == 404:
108 elif resp.status_code == 404:
124 raise IntegrationException(_("User or repo name are incorrect"))
109 raise IntegrationException(_('User or repo name are incorrect'))
125 elif resp.status_code not in [200, 201]:
110 elif resp.status_code not in [200, 201]:
126 raise IntegrationException(
111 raise IntegrationException(
127 _("Bitbucket response_code: %s") % resp.status_code
112 _('Bitbucket response_code: %s') % resp.status_code)
128 )
129 try:
113 try:
130 return resp.json()
114 return resp.json()
131 except Exception as e:
115 except Exception as e:
132 raise IntegrationException(
116 raise IntegrationException(
133 _("Error decoding response from Bitbucket: %s") % (e,)
117 _('Error decoding response from Bitbucket: %s') % (e,))
134 )
135
118
136 def get_assignees(self):
119 def get_assignees(self):
137 """Gets list of possible assignees"""
120 """Gets list of possible assignees"""
138 url = "%(api_url)s/privileges/%(owner)s/%(repo_name)s" % {
121 url = '%(api_url)s/privileges/%(owner)s/%(repo_name)s' % {
139 "api_url": self.api_url,
122 'api_url': self.api_url,
140 "owner": self.owner,
123 'owner': self.owner,
141 "repo_name": self.repo_name,
124 'repo_name': self.repo_name}
142 }
143
125
144 data = self.make_request(url)
126 data = self.make_request(url)
145 results = [{"user": self.owner, "name": "(Repo owner)"}]
127 results = [{'user': self.owner, 'name': '(Repo owner)'}]
146 if data:
128 if data:
147 for entry in data:
129 for entry in data:
148 results.append(
149     {
150         "user": entry["user"]["username"],
151         "name": entry["user"].get("display_name"),
152     }
153 )
130 results.append({"user": entry['user']['username'],
131     "name": entry['user'].get('display_name')})
154 return results
132 return results
155
133
156 def create_issue(self, form_data):
134 def create_issue(self, form_data):
157 """
135 """
158 Creates a new issue in the tracker using a REST call
136 Creates a new issue in the tracker using a REST call
159 """
137 """
160 url = "%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/" % {
138 url = '%(api_url)s/repositories/%(owner)s/%(repo_name)s/issues/' % {
161 "api_url": self.api_url,
139 'api_url': self.api_url,
162 "owner": self.owner,
140 'owner': self.owner,
163 "repo_name": self.repo_name,
141 'repo_name': self.repo_name}
164 }
165
142
166 payload = {
143 payload = {
167 "title": form_data["title"],
144 "title": form_data['title'],
168 "content": form_data["content"],
145 "content": form_data['content'],
169 "kind": form_data["kind"],
146 "kind": form_data['kind'],
170 "priority": form_data["priority"],
147 "priority": form_data['priority'],
171 "responsible": form_data["responsible"],
148 "responsible": form_data['responsible']
172 }
149 }
173 data = self.make_request(url, "post", payload)
150 data = self.make_request(url, 'post', payload)
174 f_args = {
151 f_args = {
175 "owner": self.owner,
152 "owner": self.owner,
176 "repo_name": self.repo_name,
153 "repo_name": self.repo_name,
177 "issue_id": data["local_id"],
154 "issue_id": data['local_id']
178 }
155 }
179 web_url = (
156 web_url = 'https://bitbucket.org/%(owner)s/%(repo_name)s' \
180 "https://bitbucket.org/%(owner)s/%(repo_name)s"
157 '/issue/%(issue_id)s/issue-title' % f_args
181 "/issue/%(issue_id)s/issue-title" % f_args
182 )
183 to_return = {
158 to_return = {
184 "id": data["local_id"],
159 'id': data['local_id'],
185 "resource_url": data["resource_uri"],
160 'resource_url': data['resource_uri'],
186 "web_url": web_url,
161 'web_url': web_url
187 }
162 }
188 return to_return
163 return to_return
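Both versions sign Bitbucket calls identically; only the call wrapping changes. A self-contained sketch of the OAuth1 signing used above, with placeholder credentials:

    import requests
    from requests_oauthlib import OAuth1

    auth = OAuth1('consumer-key', 'consumer-secret',        # placeholders
                  'access-token', 'token-secret')
    resp = requests.get('https://bitbucket.org/api/1.0/user',
                        auth=auth, timeout=10)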
@@ -20,7 +20,8 b' from requests.exceptions import HTTPError, ConnectionError'
20 from camplight import Request, Campfire
20 from camplight import Request, Campfire
21 from camplight.exceptions import CamplightException
21 from camplight.exceptions import CamplightException
22
22
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
23 from appenlight.models.integrations import (IntegrationBase,
24 IntegrationException)
24
25
25 _ = str
26 _ = str
26
27
@@ -32,12 +33,14 b' class NotFoundException(Exception):'
32
33
33
34
34 class CampfireIntegration(IntegrationBase):
35 class CampfireIntegration(IntegrationBase):
35 __mapper_args__ = {"polymorphic_identity": "campfire"}
36 __mapper_args__ = {
37 'polymorphic_identity': 'campfire'
38 }
36 front_visible = False
39 front_visible = False
37 as_alert_channel = True
40 as_alert_channel = True
38 supports_report_alerting = True
41 supports_report_alerting = True
39 action_notification = True
42 action_notification = True
40 integration_action = "Message via Campfire"
43 integration_action = 'Message via Campfire'
41
44
42 @classmethod
45 @classmethod
43 def create_client(cls, api_token, account):
46 def create_client(cls, api_token, account):
@@ -47,7 +50,7 b' class CampfireIntegration(IntegrationBase):'
47
50
48 class CampfireClient(object):
51 class CampfireClient(object):
49 def __init__(self, api_token, account):
52 def __init__(self, api_token, account):
50 request = Request("https://%s.campfirenow.com" % account, api_token)
53 request = Request('https://%s.campfirenow.com' % account, api_token)
51 self.campfire = Campfire(request)
54 self.campfire = Campfire(request)
52
55
53 def get_account(self):
56 def get_account(self):
@@ -62,10 +65,10 b' class CampfireClient(object):'
62 except (HTTPError, CamplightException) as e:
65 except (HTTPError, CamplightException) as e:
63 raise IntegrationException(str(e))
66 raise IntegrationException(str(e))
64
67
65 def speak_to_room(self, room, message, sound="RIMSHOT"):
68 def speak_to_room(self, room, message, sound='RIMSHOT'):
66 try:
69 try:
67 room = self.campfire.room(room)
70 room = self.campfire.room(room)
68 room.join()
71 room.join()
69 room.speak(message, type_="TextMessage")
72 room.speak(message, type_='TextMessage')
70 except (HTTPError, CamplightException, ConnectionError) as e:
73 except (HTTPError, CamplightException, ConnectionError) as e:
71 raise IntegrationException(str(e))
74 raise IntegrationException(str(e))
@@ -20,7 +20,8 b' import requests'
20 from requests.auth import HTTPBasicAuth
20 from requests.auth import HTTPBasicAuth
21 import simplejson as json
21 import simplejson as json
22
22
23 from appenlight.models.integrations import IntegrationBase, IntegrationException
23 from appenlight.models.integrations import (IntegrationBase,
24 IntegrationException)
24
25
25 _ = str
26 _ = str
26
27
@@ -32,12 +33,14 b' class NotFoundException(Exception):'
32
33
33
34
34 class FlowdockIntegration(IntegrationBase):
35 class FlowdockIntegration(IntegrationBase):
35 __mapper_args__ = {"polymorphic_identity": "flowdock"}
36 __mapper_args__ = {
37 'polymorphic_identity': 'flowdock'
38 }
36 front_visible = False
39 front_visible = False
37 as_alert_channel = True
40 as_alert_channel = True
38 supports_report_alerting = True
41 supports_report_alerting = True
39 action_notification = True
42 action_notification = True
40 integration_action = "Message via Flowdock"
43 integration_action = 'Message via Flowdock'
41
44
42 @classmethod
45 @classmethod
43 def create_client(cls, api_token):
46 def create_client(cls, api_token):
@@ -47,37 +50,33 b' class FlowdockIntegration(IntegrationBase):'
47
50
48 class FlowdockClient(object):
51 class FlowdockClient(object):
49 def __init__(self, api_token):
52 def __init__(self, api_token):
50 self.auth = HTTPBasicAuth(api_token, "")
53 self.auth = HTTPBasicAuth(api_token, '')
51 self.api_token = api_token
54 self.api_token = api_token
52 self.api_url = "https://api.flowdock.com/v1/messages"
55 self.api_url = 'https://api.flowdock.com/v1/messages'
53
56
54 def make_request(self, url, method="get", data=None):
57 def make_request(self, url, method='get', data=None):
55 headers = {
58 headers = {
56 "Content-Type": "application/json",
59 'Content-Type': 'application/json',
57 "User-Agent": "appenlight-flowdock",
60 'User-Agent': 'appenlight-flowdock',
58 }
61 }
59 try:
62 try:
60 if data:
63 if data:
61 data = json.dumps(data)
64 data = json.dumps(data)
62 resp = getattr(requests, method)(
65 resp = getattr(requests, method)(url, data=data, headers=headers,
63 url, data=data, headers=headers, timeout=10
66 timeout=10)
64 )
65 except Exception as e:
67 except Exception as e:
66 raise IntegrationException(
68 raise IntegrationException(
67 _("Error communicating with Flowdock: %s") % (e,)
69 _('Error communicating with Flowdock: %s') % (e,))
68 )
69 if resp.status_code > 299:
70 if resp.status_code > 299:
70 raise IntegrationException(resp.text)
71 raise IntegrationException(resp.text)
71 return resp
72 return resp
72
73
73 def send_to_chat(self, payload):
74 def send_to_chat(self, payload):
74 url = "%(api_url)s/chat/%(api_token)s" % {
75 url = '%(api_url)s/chat/%(api_token)s' % {'api_url': self.api_url,
75 "api_url": self.api_url,
76 'api_token': self.api_token}
76 "api_token": self.api_token,
77 return self.make_request(url, method='post', data=payload).json()
77 }
78 return self.make_request(url, method="post", data=payload).json()
79
78
80 def send_to_inbox(self, payload):
79 def send_to_inbox(self, payload):
81 f_args = {"api_url": self.api_url, "api_token": self.api_token}
80 f_args = {'api_url': self.api_url, 'api_token': self.api_token}
82 url = "%(api_url)s/team_inbox/%(api_token)s" % f_args
81 url = '%(api_url)s/team_inbox/%(api_token)s' % f_args
83 return self.make_request(url, method="post", data=payload).json()
82 return self.make_request(url, method='post', data=payload).json()
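Both Flowdock endpoints interpolate the flow token into the URL path and POST a JSON body. A sketch of the chat push with plain requests (the token is a placeholder, and the exact payload fields are an assumption; the legacy push API expects at least content and external_user_name):

    import requests
    import simplejson as json

    api_token = 'FLOW_API_TOKEN'  # placeholder
    url = 'https://api.flowdock.com/v1/messages/chat/%s' % api_token
    payload = {'content': 'Alert fired', 'external_user_name': 'AppEnlight'}
    requests.post(url, data=json.dumps(payload),
                  headers={'Content-Type': 'application/json'}, timeout=10)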
@@ -27,12 +27,14 b' class GithubAuthException(Exception):'
27
27
28
28
29 class GithubIntegration(IntegrationBase):
29 class GithubIntegration(IntegrationBase):
30 __mapper_args__ = {"polymorphic_identity": "github"}
30 __mapper_args__ = {
31 'polymorphic_identity': 'github'
32 }
31 front_visible = True
33 front_visible = True
32 as_alert_channel = False
34 as_alert_channel = False
33 supports_report_alerting = False
35 supports_report_alerting = False
34 action_notification = True
36 action_notification = True
35 integration_action = "Add issue to Github"
37 integration_action = 'Add issue to Github'
36
38
37 @classmethod
39 @classmethod
38 def create_client(cls, request, user_name=None, repo_name=None):
40 def create_client(cls, request, user_name=None, repo_name=None):
@@ -43,116 +45,112 b' class GithubIntegration(IntegrationBase):'
43 token = None
45 token = None
44 secret = None
46 secret = None
45 for identity in request.user.external_identities:
47 for identity in request.user.external_identities:
46 if identity.provider_name == "github":
48 if identity.provider_name == 'github':
47 token = identity.access_token
49 token = identity.access_token
48 secret = identity.token_secret
50 secret = identity.token_secret
49 break
51 break
50 if not token:
52 if not token:
51 raise IntegrationException("No valid auth token present for this service")
53 raise IntegrationException(
54 'No valid auth token present for this service')
52 client = GithubClient(token=token, owner=user_name, name=repo_name)
55 client = GithubClient(token=token, owner=user_name, name=repo_name)
53 return client
56 return client
54
57
55
58
56 class GithubClient(object):
59 class GithubClient(object):
57 api_url = "https://api.github.com"
60 api_url = 'https://api.github.com'
58 repo_type = "github"
61 repo_type = 'github'
59
62
60 def __init__(self, token, owner, name):
63 def __init__(self, token, owner, name):
61 self.access_token = token
64 self.access_token = token
62 self.owner = owner
65 self.owner = owner
63 self.name = name
66 self.name = name
64
67
65 def make_request(self, url, method="get", data=None, headers=None):
68 def make_request(self, url, method='get', data=None, headers=None):
66 req_headers = {
69 req_headers = {'User-Agent': 'appenlight',
67 "User-Agent": "appenlight",
70 'Content-Type': 'application/json',
68 "Content-Type": "application/json",
71 'Authorization': 'token %s' % self.access_token}
69 "Authorization": "token %s" % self.access_token,
70 }
71 try:
72 try:
72 if data:
73 if data:
73 data = json.dumps(data)
74 data = json.dumps(data)
74 resp = getattr(requests, method)(
75 resp = getattr(requests, method)(url, data=data,
75 url, data=data, headers=req_headers, timeout=10
76 headers=req_headers,
76 )
77 timeout=10)
77 except Exception as e:
78 except Exception as e:
78 msg = "Error communicating with Github: %s"
79 msg = 'Error communicating with Github: %s'
79 raise IntegrationException(_(msg) % (e,))
80 raise IntegrationException(_(msg) % (e,))
80
81
81 if resp.status_code == 404:
82 if resp.status_code == 404:
82 msg = "User or repo name are incorrect"
83 msg = 'User or repo name are incorrect'
83 raise IntegrationException(_(msg))
84 raise IntegrationException(_(msg))
84 if resp.status_code == 401:
85 if resp.status_code == 401:
85 msg = "You are not authorized to access this repo"
86 msg = 'You are not authorized to access this repo'
86 raise IntegrationException(_(msg))
87 raise IntegrationException(_(msg))
87 elif resp.status_code not in [200, 201]:
88 elif resp.status_code not in [200, 201]:
88 msg = "Github response_code: %s"
89 msg = 'Github response_code: %s'
89 raise IntegrationException(_(msg) % resp.status_code)
90 raise IntegrationException(_(msg) % resp.status_code)
90 try:
91 try:
91 return resp.json()
92 return resp.json()
92 except Exception as e:
93 except Exception as e:
93 msg = "Error decoding response from Github: %s"
94 msg = 'Error decoding response from Github: %s'
94 raise IntegrationException(_(msg) % (e,))
95 raise IntegrationException(_(msg) % (e,))
95
96
96 def get_statuses(self):
97 def get_statuses(self):
97 """Gets list of possible item statuses"""
98 """Gets list of possible item statuses"""
98 url = "%(api_url)s/repos/%(owner)s/%(name)s/labels" % {
99 url = '%(api_url)s/repos/%(owner)s/%(name)s/labels' % {
99 "api_url": self.api_url,
100 'api_url': self.api_url,
100 "owner": self.owner,
101 'owner': self.owner,
101 "name": self.name,
102 'name': self.name}
102 }
103
103
104 data = self.make_request(url)
104 data = self.make_request(url)
105
105
106 statuses = []
106 statuses = []
107 for status in data:
107 for status in data:
108 statuses.append(status["name"])
108 statuses.append(status['name'])
109 return statuses
109 return statuses
110
110
111 def get_repo(self):
111 def get_repo(self):
112 """Gets list of possible item statuses"""
112 """Gets list of possible item statuses"""
113 url = "%(api_url)s/repos/%(owner)s/%(name)s" % {
113 url = '%(api_url)s/repos/%(owner)s/%(name)s' % {
114 "api_url": self.api_url,
114 'api_url': self.api_url,
115 "owner": self.owner,
115 'owner': self.owner,
116 "name": self.name,
116 'name': self.name}
117 }
118
117
119 data = self.make_request(url)
118 data = self.make_request(url)
120 return data
119 return data
121
120
122 def get_assignees(self):
121 def get_assignees(self):
123 """Gets list of possible assignees"""
122 """Gets list of possible assignees"""
124 url = "%(api_url)s/repos/%(owner)s/%(name)s/collaborators" % {
123 url = '%(api_url)s/repos/%(owner)s/%(name)s/collaborators' % {
125 "api_url": self.api_url,
124 'api_url': self.api_url,
126 "owner": self.owner,
125 'owner': self.owner,
127 "name": self.name,
126 'name': self.name}
128 }
129 data = self.make_request(url)
127 data = self.make_request(url)
130 results = []
128 results = []
131 for entry in data:
129 for entry in data:
132 results.append({"user": entry["login"], "name": entry.get("name")})
130 results.append({"user": entry['login'],
131 "name": entry.get('name')})
133 return results
132 return results
134
133
135 def create_issue(self, form_data):
134 def create_issue(self, form_data):
136 """
135 """
137 Make a REST call to create an issue in Github's issue tracker
136 Make a REST call to create an issue in Github's issue tracker
138 """
137 """
139 url = "%(api_url)s/repos/%(owner)s/%(name)s/issues" % {
138 url = '%(api_url)s/repos/%(owner)s/%(name)s/issues' % {
140 "api_url": self.api_url,
139 'api_url': self.api_url,
141 "owner": self.owner,
140 'owner': self.owner,
142 "name": self.name,
141 'name': self.name}
143 }
144
142
145 payload = {
143 payload = {
146 "title": form_data["title"],
144 "title": form_data['title'],
147 "body": form_data["content"],
145 "body": form_data['content'],
148 "labels": [],
146 "labels": [],
149 "assignee": form_data["responsible"],
147 "assignee": form_data['responsible']
150 }
148 }
151 payload["labels"].extend(form_data["kind"])
149 payload['labels'].extend(form_data['kind'])
152 data = self.make_request(url, "post", data=payload)
150 data = self.make_request(url, 'post', data=payload)
153 to_return = {
151 to_return = {
154 "id": data["number"],
152 'id': data['number'],
155 "resource_url": data["url"],
153 'resource_url': data['url'],
156 "web_url": data["html_url"],
154 'web_url': data['html_url']
157 }
155 }
158 return to_return
156 return to_return
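The call above maps onto GitHub's POST /repos/:owner/:repo/issues endpoint; the response carries the number, url, and html_url fields used to build to_return. A standalone sketch with placeholder repo and token:

    import requests
    import simplejson as json

    headers = {'User-Agent': 'appenlight',
               'Content-Type': 'application/json',
               'Authorization': 'token GH_TOKEN'}  # placeholder token
    payload = {'title': 'Bug title', 'body': 'details',
               'labels': ['bug'], 'assignee': 'octocat'}
    resp = requests.post('https://api.github.com/repos/octocat/hello-world/issues',
                         data=json.dumps(payload), headers=headers, timeout=10)
    issue = resp.json()
    print(issue['number'], issue['html_url'])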
@@ -30,12 +30,14 b' class NotFoundException(Exception):'
30
30
31
31
32 class HipchatIntegration(IntegrationBase):
32 class HipchatIntegration(IntegrationBase):
33 __mapper_args__ = {"polymorphic_identity": "hipchat"}
33 __mapper_args__ = {
34 'polymorphic_identity': 'hipchat'
35 }
34 front_visible = False
36 front_visible = False
35 as_alert_channel = True
37 as_alert_channel = True
36 supports_report_alerting = True
38 supports_report_alerting = True
37 action_notification = True
39 action_notification = True
38 integration_action = "Message via Hipchat"
40 integration_action = 'Message via Hipchat'
39
41
40 @classmethod
42 @classmethod
41 def create_client(cls, api_token):
43 def create_client(cls, api_token):
@@ -46,30 +48,36 b' class HipchatIntegration(IntegrationBase):'
46 class HipchatClient(object):
48 class HipchatClient(object):
47 def __init__(self, api_token):
49 def __init__(self, api_token):
48 self.api_token = api_token
50 self.api_token = api_token
49 self.api_url = "https://api.hipchat.com/v1"
51 self.api_url = 'https://api.hipchat.com/v1'
50
52
51 def make_request(self, endpoint, method="get", data=None):
53 def make_request(self, endpoint, method='get', data=None):
52 headers = {"User-Agent": "appenlight-hipchat"}
54 headers = {
53 url = "%s%s" % (self.api_url, endpoint)
55 'User-Agent': 'appenlight-hipchat',
54 params = {"format": "json", "auth_token": self.api_token}
56 }
57 url = '%s%s' % (self.api_url, endpoint)
58 params = {
59 'format': 'json',
60 'auth_token': self.api_token
61 }
55 try:
62 try:
56 resp = getattr(requests, method)(
63 resp = getattr(requests, method)(url, data=data, headers=headers,
57 url, data=data, headers=headers, params=params, timeout=3
64 params=params,
58 )
65 timeout=3)
59 except Exception as e:
66 except Exception as e:
60 msg = "Error communicating with Hipchat: %s"
67 msg = 'Error communicating with Hipchat: %s'
61 raise IntegrationException(_(msg) % (e,))
68 raise IntegrationException(_(msg) % (e,))
62 if resp.status_code == 404:
69 if resp.status_code == 404:
63 msg = "Error communicating with Hipchat - Room not found"
70 msg = 'Error communicating with Hipchat - Room not found'
64 raise IntegrationException(msg)
71 raise IntegrationException(msg)
65 elif resp.status_code != requests.codes.ok:
72 elif resp.status_code != requests.codes.ok:
66 msg = "Error communicating with Hipchat - status code: %s"
73 msg = 'Error communicating with Hipchat - status code: %s'
67 raise IntegrationException(msg % resp.status_code)
74 raise IntegrationException(msg % resp.status_code)
68 return resp
75 return resp
69
76
70 def get_rooms(self):
77 def get_rooms(self):
71 # not used with notification api token
78 # not used with notification api token
72 return self.make_request("/rooms/list")
79 return self.make_request('/rooms/list')
73
80
74 def send(self, payload):
81 def send(self, payload):
75 return self.make_request("/rooms/message", method="post", data=payload).json()
82 return self.make_request('/rooms/message', method='post',
83 data=payload).json()
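The v1 rooms/message call authenticates with the auth_token query parameter rather than a header. A sketch of the equivalent request (the v1 API is long deprecated; the token, room, and form fields below are placeholders/assumptions):

    import requests

    params = {'format': 'json', 'auth_token': 'HIPCHAT_TOKEN'}  # placeholder
    data = {'room_id': 'ops', 'from': 'AppEnlight', 'message': 'Alert fired'}
    requests.post('https://api.hipchat.com/v1/rooms/message',
                  data=data, params=params, timeout=3)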
@@ -15,7 +15,8 b''
15 # limitations under the License.
15 # limitations under the License.
16
16
17 import jira
17 import jira
18 from appenlight.models.integrations import IntegrationBase, IntegrationException
18 from appenlight.models.integrations import (IntegrationBase,
19 IntegrationException)
19
20
20 _ = str
21 _ = str
21
22
@@ -25,12 +26,14 b' class NotFoundException(Exception):'
25
26
26
27
27 class JiraIntegration(IntegrationBase):
28 class JiraIntegration(IntegrationBase):
28 __mapper_args__ = {"polymorphic_identity": "jira"}
29 __mapper_args__ = {
30 'polymorphic_identity': 'jira'
31 }
29 front_visible = True
32 front_visible = True
30 as_alert_channel = False
33 as_alert_channel = False
31 supports_report_alerting = False
34 supports_report_alerting = False
32 action_notification = True
35 action_notification = True
33 integration_action = "Add issue to Jira"
36 integration_action = 'Add issue to Jira'
34
37
35
38
36 class JiraClient(object):
39 class JiraClient(object):
@@ -41,14 +44,12 b' class JiraClient(object):'
41 self.project = project
44 self.project = project
42 self.request = request
45 self.request = request
43 try:
46 try:
44 self.client = jira.client.JIRA(
47 self.client = jira.client.JIRA(options={'server': host_name},
45 options={"server": host_name}, basic_auth=(user_name, password)
48 basic_auth=(user_name, password))
46 )
47 except jira.JIRAError as e:
49 except jira.JIRAError as e:
48 raise IntegrationException(
50 raise IntegrationException(
49 "Communication problem: HTTP_STATUS:%s, URL:%s "
51 'Communication problem: HTTP_STATUS:%s, URL:%s ' % (
50 % (e.status_code, e.url)
52 e.status_code, e.url))
51 )
52
53
53 def get_projects(self):
54 def get_projects(self):
54 projects = self.client.projects()
55 projects = self.client.projects()
@@ -57,42 +58,42 b' class JiraClient(object):'
57 def get_assignees(self, request):
58 def get_assignees(self, request):
58 """Gets list of possible assignees"""
59 """Gets list of possible assignees"""
59 cache_region = request.registry.cache_regions.redis_sec_30
60 cache_region = request.registry.cache_regions.redis_sec_30
60
61 @cache_region.cache_on_arguments('JiraClient.get_assignees')
61 @cache_region.cache_on_arguments("JiraClient.get_assignees")
62 def cached(project_name):
62 def cached(project_name):
63 users = self.client.search_assignable_users_for_issues(
63 users = self.client.search_assignable_users_for_issues(
64 None, project=project_name
64 None, project=project_name)
65 )
66 results = []
65 results = []
67 for user in users:
66 for user in users:
68 results.append({"id": user.name, "name": user.displayName})
67 results.append({"id": user.name, "name": user.displayName})
69 return results
68 return results
70
71 return cached(self.project)
69 return cached(self.project)
72
70
73 def get_issue_types(self, request):
71 def get_issue_types(self, request):
74 metadata = self.get_metadata(request)
72 metadata = self.get_metadata(request)
75 assignees = self.get_assignees(request)
73 assignees = self.get_assignees(request)
76 parsed_metadata = []
74 parsed_metadata = []
77 for entry in metadata["projects"][0]["issuetypes"]:
75 for entry in metadata['projects'][0]['issuetypes']:
78 issue = {"name": entry["name"], "id": entry["id"], "fields": []}
76 issue = {"name": entry['name'],
79 for i_id, field_i in entry["fields"].items():
77 "id": entry['id'],
78 "fields": []}
79 for i_id, field_i in entry['fields'].items():
80 field = {
80 field = {
81 "name": field_i["name"],
81 "name": field_i['name'],
82 "id": i_id,
82 "id": i_id,
83 "required": field_i["required"],
83 "required": field_i['required'],
84 "values": [],
84 "values": [],
85 "type": field_i["schema"].get("type"),
85 "type": field_i['schema'].get('type')
86 }
86 }
87 if field_i.get("allowedValues"):
87 if field_i.get('allowedValues'):
88 field["values"] = []
88 field['values'] = []
89 for i in field_i["allowedValues"]:
89 for i in field_i['allowedValues']:
90 field["values"].append(
90 field['values'].append(
91 {"id": i["id"], "name": i.get("name", i.get("value", ""))}
91 {'id': i['id'],
92 )
92 'name': i.get('name', i.get('value', ''))
93 if field["id"] == "assignee":
93 })
94 field["values"] = assignees
94 if field['id'] == 'assignee':
95 issue["fields"].append(field)
95 field['values'] = assignees
96 issue['fields'].append(field)
96 parsed_metadata.append(issue)
97 parsed_metadata.append(issue)
97 return parsed_metadata
98 return parsed_metadata
98
99
@@ -101,37 +102,35 b' class JiraClient(object):'
101 # @cache_region.cache_on_arguments('JiraClient.get_metadata')
102 # @cache_region.cache_on_arguments('JiraClient.get_metadata')
102 def cached(project_name):
103 def cached(project_name):
103 return self.client.createmeta(
104 return self.client.createmeta(
104 projectKeys=project_name, expand="projects.issuetypes.fields"
105 projectKeys=project_name, expand='projects.issuetypes.fields')
105 )
106
107 return cached(self.project)
106 return cached(self.project)
108
107
109 def create_issue(self, form_data, request):
108 def create_issue(self, form_data, request):
110 issue_types = self.get_issue_types(request)
109 issue_types = self.get_issue_types(request)
111 payload = {
110 payload = {
112 "project": {"key": form_data["project"]},
111 'project': {'key': form_data['project']},
113 "summary": form_data["title"],
112 'summary': form_data['title'],
114 "description": form_data["content"],
113 'description': form_data['content'],
115 "issuetype": {"id": form_data["issue_type"]},
114 'issuetype': {'id': form_data['issue_type']},
116 "priority": {"id": form_data["priority"]},
115 "priority": {'id': form_data['priority']},
117 "assignee": {"name": form_data["responsible"]},
116 "assignee": {'name': form_data['responsible']},
118 }
117 }
119 for issue_type in issue_types:
118 for issue_type in issue_types:
120 if issue_type["id"] == form_data["issue_type"]:
119 if issue_type['id'] == form_data['issue_type']:
121 for field in issue_type["fields"]:
120 for field in issue_type['fields']:
122 # set some defaults for other required fields
121 # set some defaults for other required fields
123 if field == "reporter":
122 if field == 'reporter':
124 payload["reporter"] = {"id": self.user_name}
123 payload["reporter"] = {'id': self.user_name}
125 if field["required"] and field["id"] not in payload:
124 if field['required'] and field['id'] not in payload:
126 if field["type"] == "array":
125 if field['type'] == 'array':
127 payload[field["id"]] = [field["values"][0]]
126 payload[field['id']] = [field['values'][0], ]
128 elif field["type"] == "string":
127 elif field['type'] == 'string':
129 payload[field["id"]] = ""
128 payload[field['id']] = ''
130 new_issue = self.client.create_issue(fields=payload)
129 new_issue = self.client.create_issue(fields=payload)
131 web_url = self.host_name + "/browse/" + new_issue.key
130 web_url = self.host_name + '/browse/' + new_issue.key
132 to_return = {
131 to_return = {
133 "id": new_issue.id,
132 'id': new_issue.id,
134 "resource_url": new_issue.self,
133 'resource_url': new_issue.self,
135 "web_url": web_url,
134 'web_url': web_url
136 }
135 }
137 return to_return
136 return to_return
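
For reference, the reflowed constructor and create_issue() above map onto the jira package like this. This is a minimal sketch, not code from the pull request: the server URL, credentials, project key and field ids are placeholders, and jira.client.JIRA / create_issue(fields=...) are the documented entry points of the jira library.

import jira.client

# Placeholder server and credentials, not values from this repository.
client = jira.client.JIRA(options={'server': 'https://jira.example.com'},
                          basic_auth=('bot', 'secret'))
# Same payload shape that create_issue() assembles from form_data above.
new_issue = client.create_issue(fields={
    'project': {'key': 'PROJ'},
    'summary': 'Server error on /api/reports',
    'description': 'Traceback attached',
    'issuetype': {'id': '1'},
})
print('https://jira.example.com/browse/' + new_issue.key)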
@@ -18,7 +18,8 b' import logging'
 
 import requests
 
-from appenlight.models.integrations import IntegrationBase, IntegrationException
+from appenlight.models.integrations import (IntegrationBase,
+                                            IntegrationException)
 from appenlight.lib.ext_json import json
 
 _ = str
@@ -31,12 +32,14 b' class NotFoundException(Exception):'
 
 
 class SlackIntegration(IntegrationBase):
-    __mapper_args__ = {"polymorphic_identity": "slack"}
+    __mapper_args__ = {
+        'polymorphic_identity': 'slack'
+    }
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = "Message via Slack"
+    integration_action = 'Message via Slack'
 
     @classmethod
     def create_client(cls, api_token):
@@ -49,17 +52,23 b' class SlackClient(object):'
         self.api_url = api_url
 
     def make_request(self, data=None):
-        headers = {"User-Agent": "appenlight-slack", "Content-Type": "application/json"}
+        headers = {
+            'User-Agent': 'appenlight-slack',
+            'Content-Type': 'application/json'
+        }
         try:
-            resp = getattr(requests, "post")(
-                self.api_url, data=json.dumps(data), headers=headers, timeout=3
-            )
+            resp = getattr(requests, 'post')(self.api_url,
+                                             data=json.dumps(data),
+                                             headers=headers,
+                                             timeout=3)
         except Exception as e:
-            raise IntegrationException(_("Error communicating with Slack: %s") % (e,))
+            raise IntegrationException(
+                _('Error communicating with Slack: %s') % (e,))
         if resp.status_code != requests.codes.ok:
-            msg = "Error communicating with Slack - status code: %s"
+            msg = 'Error communicating with Slack - status code: %s'
             raise IntegrationException(msg % resp.status_code)
         return resp
 
     def send(self, payload):
-        return self.make_request("/rooms/message", method="post", data=payload).json()
+        return self.make_request('/rooms/message', method='post',
+                                 data=payload).json()
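
Side note on SlackClient: make_request() boils down to the requests call sketched below; the webhook URL is a placeholder and stdlib json stands in for appenlight.lib.ext_json. Also worth flagging for review: send() passes a positional URL and a method keyword that make_request(self, data=None) does not accept, which looks like a pre-existing bug carried through unchanged on both sides of this diff.

import json
import requests

# Placeholder incoming-webhook URL, not a value from this repository.
api_url = 'https://hooks.slack.com/services/T000/B000/XXXX'
headers = {
    'User-Agent': 'appenlight-slack',
    'Content-Type': 'application/json'
}
resp = requests.post(api_url, data=json.dumps({'text': 'test alert'}),
                     headers=headers, timeout=3)
assert resp.status_code == requests.codes.ok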
@@ -18,7 +18,8 b' import logging'
 
 import requests
 
-from appenlight.models.integrations import IntegrationBase, IntegrationException
+from appenlight.models.integrations import (IntegrationBase,
+                                            IntegrationException)
 from appenlight.models.alert_channel import AlertChannel
 from appenlight.lib.ext_json import json
 
@@ -32,12 +33,14 b' class NotFoundException(Exception):'
 
 
 class WebhooksIntegration(IntegrationBase):
-    __mapper_args__ = {"polymorphic_identity": "webhooks"}
+    __mapper_args__ = {
+        'polymorphic_identity': 'webhooks'
+    }
     front_visible = False
     as_alert_channel = True
     supports_report_alerting = True
     action_notification = True
-    integration_action = "Message via Webhooks"
+    integration_action = 'Message via Webhooks'
 
     @classmethod
     def create_client(cls, url):
@@ -49,33 +52,34 b' class WebhooksClient(object):'
     def __init__(self, url):
         self.api_url = url
 
-    def make_request(self, url, method="get", data=None):
+    def make_request(self, url, method='get', data=None):
         headers = {
-            "Content-Type": "application/json",
-            "User-Agent": "appenlight-webhooks",
+            'Content-Type': 'application/json',
+            'User-Agent': 'appenlight-webhooks',
         }
         try:
             if data:
                 data = json.dumps(data)
-            resp = getattr(requests, method)(url, data=data, headers=headers, timeout=3)
+            resp = getattr(requests, method)(url, data=data, headers=headers,
+                                             timeout=3)
         except Exception as e:
             raise IntegrationException(
-                _("Error communicating with Webhooks: {}").format(e)
-            )
+                _('Error communicating with Webhooks: {}').format(e))
         if resp.status_code > 299:
             raise IntegrationException(
-                "Error communicating with Webhooks - status code: {}".format(
-                    resp.status_code
-                )
-            )
+                'Error communicating with Webhooks - status code: {}'.format(
+                    resp.status_code))
         return resp
 
     def send_to_hook(self, payload):
-        return self.make_request(self.api_url, method="post", data=payload).json()
+        return self.make_request(self.api_url, method='post',
+                                 data=payload).json()
 
 
 class WebhooksAlertChannel(AlertChannel):
-    __mapper_args__ = {"polymorphic_identity": "webhooks"}
+    __mapper_args__ = {
+        'polymorphic_identity': 'webhooks'
+    }
 
     def notify_reports(self, **kwargs):
         """
@@ -91,28 +95,17 b' class WebhooksAlertChannel(AlertChannel):'
         """
         template_vars = self.get_notification_basic_vars(kwargs)
         payload = []
-        include_keys = (
-            "id",
-            "http_status",
-            "report_type",
-            "resource_name",
-            "front_url",
-            "resource_id",
-            "error",
-            "url_path",
-            "tags",
-            "duration",
-        )
-
-        for occurences, report in kwargs["reports"]:
-            r_dict = report.last_report_ref.get_dict(
-                kwargs["request"], include_keys=include_keys
-            )
-            r_dict["group"]["occurences"] = occurences
+        include_keys = ('id', 'http_status', 'report_type', 'resource_name',
+                        'front_url', 'resource_id', 'error', 'url_path',
+                        'tags', 'duration')
+
+        for occurences, report in kwargs['reports']:
+            r_dict = report.last_report_ref.get_dict(kwargs['request'],
+                                                     include_keys=include_keys)
+            r_dict['group']['occurences'] = occurences
             payload.append(r_dict)
         client = WebhooksIntegration.create_client(
-            self.integration.config["reports_webhook"]
-        )
+            self.integration.config['reports_webhook'])
         client.send_to_hook(payload)
 
     def notify_alert(self, **kwargs):
@@ -127,19 +120,19 b' class WebhooksAlertChannel(AlertChannel):'
 
         """
         payload = {
-            "alert_action": kwargs["event"].unified_alert_action(),
-            "alert_name": kwargs["event"].unified_alert_name(),
-            "event_time": kwargs["event"].end_date or kwargs["event"].start_date,
-            "resource_name": None,
-            "resource_id": None,
+            'alert_action': kwargs['event'].unified_alert_action(),
+            'alert_name': kwargs['event'].unified_alert_name(),
+            'event_time': kwargs['event'].end_date or kwargs[
+                'event'].start_date,
+            'resource_name': None,
+            'resource_id': None
         }
-        if kwargs["event"].values and kwargs["event"].values.get("reports"):
-            payload["reports"] = kwargs["event"].values.get("reports", [])
-        if "application" in kwargs:
-            payload["resource_name"] = kwargs["application"].resource_name
-            payload["resource_id"] = kwargs["application"].resource_id
+        if kwargs['event'].values and kwargs['event'].values.get('reports'):
+            payload['reports'] = kwargs['event'].values.get('reports', [])
+        if 'application' in kwargs:
+            payload['resource_name'] = kwargs['application'].resource_name
+            payload['resource_id'] = kwargs['application'].resource_id
 
         client = WebhooksIntegration.create_client(
-            self.integration.config["alerts_webhook"]
-        )
+            self.integration.config['alerts_webhook'])
         client.send_to_hook(payload)
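
To make notify_alert() concrete for whoever implements the receiving endpoint: the hook is POSTed a JSON document with the keys built above. Every value in this sketch is illustrative (in particular the 'OPEN' action string and the URL), and it assumes the WebhooksClient class from this diff.

# Illustrative alert payload, mirroring the keys notify_alert() builds.
payload = {
    'alert_action': 'OPEN',          # kwargs['event'].unified_alert_action()
    'alert_name': 'Error report alert',
    'event_time': '2018-10-13T19:27:30',
    'resource_name': 'my-app',
    'resource_id': 42,
    'reports': [],
}

client = WebhooksClient('https://example.com/hooks/alerts')  # placeholder URL
client.send_to_hook(payload)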
@@ -29,23 +29,21 b' log = logging.getLogger(__name__)'
 
 
 class Log(Base, BaseModel):
-    __tablename__ = "logs"
-    __table_args__ = {"implicit_returning": False}
+    __tablename__ = 'logs'
+    __table_args__ = {'implicit_returning': False}
 
     log_id = sa.Column(sa.BigInteger(), nullable=False, primary_key=True)
-    resource_id = sa.Column(
-        sa.Integer(),
-        sa.ForeignKey(
-            "applications.resource_id", onupdate="CASCADE", ondelete="CASCADE"
-        ),
-        nullable=False,
-        index=True,
-    )
-    log_level = sa.Column(sa.Unicode, nullable=False, index=True, default="INFO")
-    message = sa.Column(sa.UnicodeText(), default="")
-    timestamp = sa.Column(
-        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
-    )
+    resource_id = sa.Column(sa.Integer(),
+                            sa.ForeignKey('applications.resource_id',
+                                          onupdate='CASCADE',
+                                          ondelete='CASCADE'),
+                            nullable=False,
+                            index=True)
+    log_level = sa.Column(sa.Unicode, nullable=False, index=True,
+                          default='INFO')
+    message = sa.Column(sa.UnicodeText(), default='')
+    timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
+                          server_default=sa.func.now())
     request_id = sa.Column(sa.Unicode())
     namespace = sa.Column(sa.Unicode())
     primary_key = sa.Column(sa.Unicode())
@@ -54,40 +52,39 b' class Log(Base, BaseModel):'
     permanent = sa.Column(sa.Boolean(), nullable=False, default=False)
 
     def __str__(self):
-        return self.__unicode__().encode("utf8")
+        return self.__unicode__().encode('utf8')
 
     def __unicode__(self):
-        return "<Log id:%s, lv:%s, ns:%s >" % (
-            self.log_id,
-            self.log_level,
-            self.namespace,
-        )
+        return '<Log id:%s, lv:%s, ns:%s >' % (
+            self.log_id, self.log_level, self.namespace)
 
     def set_data(self, data, resource):
-        level = data.get("log_level").upper()
+        level = data.get('log_level').upper()
         self.log_level = getattr(LogLevel, level, LogLevel.UNKNOWN)
-        self.message = data.get("message", "")
-        server_name = data.get("server", "").lower() or "unknown"
-        self.tags = {"server_name": server_name}
-        if data.get("tags"):
-            for tag_tuple in data["tags"]:
+        self.message = data.get('message', '')
+        server_name = data.get('server', '').lower() or 'unknown'
+        self.tags = {
+            'server_name': server_name
+        }
+        if data.get('tags'):
+            for tag_tuple in data['tags']:
                 self.tags[tag_tuple[0]] = tag_tuple[1]
-        self.timestamp = data["date"]
-        r_id = data.get("request_id", "")
+        self.timestamp = data['date']
+        r_id = data.get('request_id', '')
         if not r_id:
-            r_id = ""
-        self.request_id = r_id.replace("-", "")
+            r_id = ''
+        self.request_id = r_id.replace('-', '')
         self.resource_id = resource.resource_id
-        self.namespace = data.get("namespace") or ""
-        self.permanent = data.get("permanent")
-        self.primary_key = data.get("primary_key")
+        self.namespace = data.get('namespace') or ''
+        self.permanent = data.get('permanent')
+        self.primary_key = data.get('primary_key')
         if self.primary_key is not None:
-            self.tags["appenlight_primary_key"] = self.primary_key
+            self.tags['appenlight_primary_key'] = self.primary_key
 
     def get_dict(self):
         instance_dict = super(Log, self).get_dict()
-        instance_dict["log_level"] = LogLevel.key_from_value(self.log_level)
-        instance_dict["resource_name"] = self.application.resource_name
+        instance_dict['log_level'] = LogLevel.key_from_value(self.log_level)
+        instance_dict['resource_name'] = self.application.resource_name
         return instance_dict
 
     @property
@@ -95,38 +92,39 b' class Log(Base, BaseModel):'
         if not self.primary_key:
             return None
 
-        to_hash = "{}_{}_{}".format(self.resource_id, self.primary_key, self.namespace)
-        return hashlib.sha1(to_hash.encode("utf8")).hexdigest()
+        to_hash = '{}_{}_{}'.format(self.resource_id, self.primary_key,
+                                    self.namespace)
+        return hashlib.sha1(to_hash.encode('utf8')).hexdigest()
 
     def es_doc(self):
         tags = {}
         tag_list = []
         for name, value in self.tags.items():
             # replace dot in indexed tag name
-            name = name.replace(".", "_")
+            name = name.replace('.', '_')
             tag_list.append(name)
             tags[name] = {
                 "values": convert_es_type(value),
-                "numeric_values": value
-                if (isinstance(value, (int, float)) and not isinstance(value, bool))
-                else None,
+                "numeric_values": value if (
+                    isinstance(value, (int, float)) and
+                    not isinstance(value, bool)) else None
             }
         return {
-            "log_id": str(self.log_id),
-            "delete_hash": self.delete_hash,
-            "resource_id": self.resource_id,
-            "request_id": self.request_id,
-            "log_level": LogLevel.key_from_value(self.log_level),
-            "timestamp": self.timestamp,
-            "message": self.message if self.message else "",
-            "namespace": self.namespace if self.namespace else "",
-            "tags": tags,
-            "tag_list": tag_list,
+            'pg_id': str(self.log_id),
+            'delete_hash': self.delete_hash,
+            'resource_id': self.resource_id,
+            'request_id': self.request_id,
+            'log_level': LogLevel.key_from_value(self.log_level),
+            'timestamp': self.timestamp,
+            'message': self.message if self.message else '',
+            'namespace': self.namespace if self.namespace else '',
+            'tags': tags,
+            'tag_list': tag_list
         }
 
     @property
     def partition_id(self):
         if self.permanent:
-            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m")
+            return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m')
         else:
-            return "rcae_l_%s" % self.timestamp.strftime("%Y_%m_%d")
+            return 'rcae_l_%s' % self.timestamp.strftime('%Y_%m_%d')
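
Two behavioural details worth calling out here. First, es_doc() renames the indexed key from "log_id" to 'pg_id' on the incoming side, which consumers of the Elasticsearch index will need to track. Second, partition_id routes permanent logs into monthly partitions and ordinary logs into daily ones; a standalone restatement of that logic, mirroring the code above:

from datetime import datetime

def log_partition(timestamp, permanent):
    # Mirrors Log.partition_id: monthly buckets for permanent logs,
    # daily buckets otherwise.
    if permanent:
        return 'rcae_l_%s' % timestamp.strftime('%Y_%m')
    return 'rcae_l_%s' % timestamp.strftime('%Y_%m_%d')

assert log_partition(datetime(2018, 10, 13), True) == 'rcae_l_2018_10'
assert log_partition(datetime(2018, 10, 13), False) == 'rcae_l_2018_10_13'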
@@ -25,45 +25,40 b' from appenlight.models import Base'
 
 
 class Metric(Base, BaseModel):
-    __tablename__ = "metrics"
-    __table_args__ = {"implicit_returning": False}
+    __tablename__ = 'metrics'
+    __table_args__ = {'implicit_returning': False}
 
     pkey = sa.Column(sa.BigInteger(), primary_key=True)
-    resource_id = sa.Column(
-        sa.Integer(),
-        sa.ForeignKey("applications.resource_id"),
-        nullable=False,
-        primary_key=True,
-    )
-    timestamp = sa.Column(
-        sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now()
-    )
+    resource_id = sa.Column(sa.Integer(),
+                            sa.ForeignKey('applications.resource_id'),
+                            nullable=False, primary_key=True)
+    timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow,
+                          server_default=sa.func.now())
     tags = sa.Column(JSON(), default={})
     namespace = sa.Column(sa.Unicode(255))
 
     @property
     def partition_id(self):
-        return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d")
+        return 'rcae_m_%s' % self.timestamp.strftime('%Y_%m_%d')
 
     def es_doc(self):
         tags = {}
         tag_list = []
         for name, value in self.tags.items():
             # replace dot in indexed tag name
-            name = name.replace(".", "_")
+            name = name.replace('.', '_')
             tag_list.append(name)
             tags[name] = {
                 "values": convert_es_type(value),
-                "numeric_values": value
-                if (isinstance(value, (int, float)) and not isinstance(value, bool))
-                else None,
+                "numeric_values": value if (
+                    isinstance(value, (int, float)) and
+                    not isinstance(value, bool)) else None
             }
 
         return {
-            "metric_id": self.pkey,
-            "resource_id": self.resource_id,
-            "timestamp": self.timestamp,
-            "namespace": self.namespace,
-            "tags": tags,
-            "tag_list": tag_list,
-        }
+            'resource_id': self.resource_id,
+            'timestamp': self.timestamp,
+            'namespace': self.namespace,
+            'tags': tags,
+            'tag_list': tag_list
+        }
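
Note that the incoming side also drops the "metric_id" key from the indexed document. Separately, the numeric_values expression in both the Log and Metric models relies on the fact that bool is a subclass of int in Python, so the extra isinstance check is what keeps True/False out of the numeric field; a standalone illustration:

def numeric_or_none(value):
    # bool subclasses int, so it must be excluded explicitly.
    if isinstance(value, (int, float)) and not isinstance(value, bool):
        return value
    return None

assert numeric_or_none(3.5) == 3.5
assert numeric_or_none(7) == 7
assert numeric_or_none(True) is None
assert numeric_or_none('label') is None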
@@ -16,25 +16,25 b''
 
 import sqlalchemy as sa
 from ziggurat_foundations.models.base import BaseModel
-from sqlalchemy.dialects.postgresql import JSON
+from sqlalchemy.dialects.postgres import JSON
 
 from . import Base
 
 
 class PluginConfig(Base, BaseModel):
-    __tablename__ = "plugin_configs"
+    __tablename__ = 'plugin_configs'
 
     id = sa.Column(sa.Integer, primary_key=True)
     plugin_name = sa.Column(sa.Unicode)
     section = sa.Column(sa.Unicode)
     config = sa.Column(JSON, nullable=False)
-    resource_id = sa.Column(
-        sa.Integer(),
-        sa.ForeignKey("resources.resource_id", onupdate="cascade", ondelete="cascade"),
-    )
-    owner_id = sa.Column(
-        sa.Integer(), sa.ForeignKey("users.id", onupdate="cascade", ondelete="cascade")
-    )
+    resource_id = sa.Column(sa.Integer(),
+                            sa.ForeignKey('resources.resource_id',
+                                          onupdate='cascade',
+                                          ondelete='cascade'))
+    owner_id = sa.Column(sa.Integer(),
+                         sa.ForeignKey('users.id', onupdate='cascade',
+                                       ondelete='cascade'))
 
     def __json__(self, request):
         return self.get_dict()
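
One review flag on this file: the incoming side switches the JSON import to sqlalchemy.dialects.postgres, a long-deprecated alias of sqlalchemy.dialects.postgresql that later SQLAlchemy releases removed. A minimal self-contained sketch of the column definition using the canonical module (the demo class and table name are hypothetical):

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class PluginConfigDemo(Base):
    # Demo model only; not the PluginConfig class from this diff.
    __tablename__ = 'plugin_configs_demo'
    id = sa.Column(sa.Integer, primary_key=True)
    config = sa.Column(JSON, nullable=False)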
NO CONTENT: the remaining modified files and 4 removed files are too big and their content was truncated.
General Comments 2
Under Review
author
Auto status change to "Under Review"

Rejected

Please use: https://github.com/Appenlight/appenlight to contribute :) Thanks!