Auto status change to "Under Review"
Closed
Pull request !2235
Created on Sat, 13 Oct 2018 19:27:30
- ini: added new key
- registration: add a way to disable registration
- Update register.jinja2
- alert_channels: it should also be a pkey
- alert_channels: allow binding to resources
Pull request versions not available.
13 additional commits not shown.
@@ -84,6 +84,11 @@ def main(global_config, **settings):
     encryption.ENCRYPTION_SECRET = settings.get('encryption_secret')
     # import this later so encyption key can be monkeypatched
     from appenlight.models import DBSession, register_datastores
+
+    # registration
+    settings['appenlight.disable_registration'] = asbool(
+        settings.get('appenlight.disable_registration'))
+
     # update config with cometd info
     settings['cometd_servers'] = {'server': settings['cometd.server'],
                                   'secret': settings['cometd.secret']}
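The new flag is read from the application's `.ini` configuration (the "ini: added new key" commit); the exact section and default value are not shown in this diff. A minimal sketch of the conversion, assuming the `asbool` used here is Pyramid's `pyramid.settings.asbool` (the import is not visible in this hunk):

```python
# sketch only -- the key name comes from the diff; the asbool import is an assumption
from pyramid.settings import asbool

# e.g. in the application's ini file (section name assumed):
# appenlight.disable_registration = true

asbool('true')   # -> True: the register view will refuse new sign-ups
asbool(None)     # -> False: a missing key leaves registration enabled
```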
@@ -176,6 +176,10 @@ def register(request):
     if request.method == 'POST' and form.validate():
         log.info('registering user')
         # insert new user here
+        if request.registry.settings['appenlight.disable_registration']:
+            request.session.flash(_('Registration is currently disabled.'))
+            return HTTPFound(location=request.route_url('/'))
+
         new_user = User()
         DBSession.add(new_user)
         form.populate_obj(new_user)
@@ -79,6 +79,10 @@ def users_create(request):
         csrf_context=request)
     if form.validate():
         log.info('registering user')
+        # probably not needed in the future since this requires root anyways
+        # lets keep this here in case we lower view permission in the future
+        # if request.registry.settings['appenlight.disable_registration']:
+        #     return HTTPUnprocessableEntity(body={'error': 'Registration is currently disabled.'})
         user = User()
         # insert new user here
         DBSession.add(user)
@@ -77,6 +77,11 @@
     {{ widgets.render_form(form) }}
     <input type="submit" class="btn btn-primary"
            value="Register">
+    <p>
+        By clicking "Register", you agree to our
+        <a href="https://getappenlight.com/page/terms-of-service.html">terms of service</a> and
+        <a href="https://getappenlight.com/page/privacy-policy.html">privacy policy</a>.
+    </p>
 </form>
 </div>
 </div>
@@ -19,10 +19,12 @@ def upgrade():
         'channels_resources',
         sa.Column('channel_pkey', sa.Integer,
                   sa.ForeignKey('alert_channels.pkey',
-                                ondelete='CASCADE', onupdate='CASCADE')),
+                                ondelete='CASCADE', onupdate='CASCADE'),
+                  primary_key=True),
         sa.Column('resource_id', sa.Integer,
                   sa.ForeignKey('resources.resource_id',
-                                ondelete='CASCADE', onupdate='CASCADE'))
+                                ondelete='CASCADE', onupdate='CASCADE'),
+                  primary_key=True)
     )


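Marking both association columns as `primary_key=True` gives the table a composite primary key, so each channel/resource pair can be linked at most once. A rough sketch of what the migrated table definition amounts to (names taken from the diff above; this is not code from the PR):

```python
# sketch of the post-migration table, equivalent to upgrade() after this change
import sqlalchemy as sa

metadata = sa.MetaData()
channels_resources = sa.Table(
    'channels_resources', metadata,
    sa.Column('channel_pkey', sa.Integer,
              sa.ForeignKey('alert_channels.pkey',
                            ondelete='CASCADE', onupdate='CASCADE'),
              primary_key=True),
    sa.Column('resource_id', sa.Integer,
              sa.ForeignKey('resources.resource_id',
                            ondelete='CASCADE', onupdate='CASCADE'),
              primary_key=True),
)
# (channel_pkey, resource_id) is the primary key, so inserting the same
# link twice raises an IntegrityError instead of creating a duplicate row
```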
@@ -79,7 +79,7 @@ class AlertChannel(Base, BaseModel):
                                 secondary=channel_rules_m2m_table,
                                 backref='channels')
     resources = sa.orm.relationship('Resource',
-                                    cascade="all, delete-orphan",
+                                    cascade="all",
                                     passive_deletes=True,
                                     passive_updates=True,
                                     secondary=channel_resources_m2m_table,
@@ -0,0 +1,30 @@
+"""connect resources to alert_channels
+
+Revision ID: e9fcfbdd9498
+Revises: 55b6e612672f
+Create Date: 2018-02-28 13:52:50.717217
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'e9fcfbdd9498'
+down_revision = '55b6e612672f'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    op.create_table(
+        'channels_resources',
+        sa.Column('channel_pkey', sa.Integer,
+                  sa.ForeignKey('alert_channels.pkey',
+                                ondelete='CASCADE', onupdate='CASCADE')),
+        sa.Column('resource_id', sa.Integer,
+                  sa.ForeignKey('resources.resource_id',
+                                ondelete='CASCADE', onupdate='CASCADE'))
+    )
+
+
+def downgrade():
+    op.drop_table('channels_resources')
@@ -34,6 +34,14 @@ channel_rules_m2m_table = sa.Table(
              sa.ForeignKey('alert_channels_actions.pkey'))
 )
 
+channel_resources_m2m_table = sa.Table(
+    'channels_resources', Base.metadata,
+    sa.Column('channel_pkey', sa.Integer,
+              sa.ForeignKey('alert_channels.pkey')),
+    sa.Column('resource_id', sa.Integer,
+              sa.ForeignKey('resources.resource_id'))
+)
+
 DATE_FRMT = '%Y-%m-%dT%H:%M'


@@ -70,6 +78,12 @@ class AlertChannel(Base, BaseModel):
                                 passive_updates=True,
                                 secondary=channel_rules_m2m_table,
                                 backref='channels')
+    resources = sa.orm.relationship('Resource',
+                                    cascade="all, delete-orphan",
+                                    passive_deletes=True,
+                                    passive_updates=True,
+                                    secondary=channel_resources_m2m_table,
+                                    backref='resources')
 
     @property
     def channel_visible_value(self):
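A rough usage sketch of the new relationship for reviewers (the objects are hypothetical; `DBSession` and `AlertChannel` appear in this PR, while `Resource` is assumed to be the model the relationship targets):

```python
# hypothetical objects; illustrates binding an alert channel to selected resources
channel = DBSession.query(AlertChannel).first()
resource = DBSession.query(Resource).first()   # 'Resource' model assumed

channel.resources.append(resource)   # writes a row into 'channels_resources'
DBSession.flush()

# a channel with an empty .resources list keeps the old behaviour and
# receives alerts for every resource its owner can view
```

Note that an earlier hunk in this PR relaxes the cascade to `"all"`; SQLAlchemy does not support `delete-orphan` cascade across a secondary (many-to-many) table.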
@@ -92,7 +92,12 @@ class Event(Base, BaseModel):
         users = set([p.user for p in resource.users_for_perm('view')])
         for user in users:
             for channel in user.alert_channels:
-                if not channel.channel_validated or not channel.send_alerts:
+                matches_resource = not channel.resources or resource in [r.resource_id for r in channel.resources]
+                if (
+                        not channel.channel_validated or
+                        not channel.send_alerts or
+                        not matches_resource
+                ):
                     continue
                 else:
                     try:
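For readability, a standalone restatement of the filtering rule added above (the helper name is made up; the logic mirrors the hunk and is a sketch only):

```python
def channel_should_receive_alert(channel, resource):
    """Mirror of the condition added to the Event notification loop (sketch)."""
    # an unbound channel (no resources attached) matches everything;
    # otherwise the alerting resource must be among the bound ones
    matches_resource = (not channel.resources or
                        resource in [r.resource_id for r in channel.resources])
    return (channel.channel_validated and
            channel.send_alerts and
            matches_resource)
```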
@@ -43,7 +43,7 @@ from appenlight.lib import cache_regions
 from appenlight.lib.ext_json import json
 from appenlight.security import groupfinder, AuthTokenAuthenticationPolicy
 
-__license__ = 'AGPLv3, and Commercial License'
+__license__ = 'Apache 2.0'
 __author__ = 'RhodeCode GmbH'
 __url__ = 'http://rhodecode.com'

@@ -4,8 +4,8 @@ AppEnlight
 Automatic Installation
 ======================
 
-Use the ansible scripts in the `
-You can also use `packer` files in `
+Use the ansible scripts in the `automation` repository to build complete instance of application
+You can also use `packer` files in `automation/packer` to create whole VM's for KVM and VMWare.
 
 Manual Installation
 ===================
@@ -13,7 +13,7 @@ Manual Installation
 To run the app you need to have meet prerequsites:
 
 - python 3.5+
-- running elasticsearch (2.3+ tested)
+- running elasticsearch (2.3+/2.4 tested)
 - running postgresql (9.5+ required)
 - running redis

@@ -23,7 +23,7 @@ Install the app by performing
 
     python setup.py develop
 
-Install the appenlight uptime plugin (`ae_uptime_ce` package).
+Install the appenlight uptime plugin (`ae_uptime_ce` package from `appenlight-uptime-ce` repository).
 
 After installing the application you need to perform following steps:

@@ -0,0 +1,220 @@
+# Created by .ignore support plugin (hsz.mobi)
+### Node template
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Typescript v1 declaration files
+typings/
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env
+
+### JetBrains template
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff:
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/dictionaries
+
+# Sensitive or high-churn files:
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.xml
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+
+# Gradle:
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# CMake
+cmake-build-debug/
+
+# Mongo Explorer plugin:
+.idea/**/mongoSettings.xml
+
+## File-based project format:
+*.iws
+
+## Plugin-specific files:
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+### Python template
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+### Example user template template
+### Example user template
+
+# IntelliJ project files
+.idea
+*.iml
+out
+gen
General Comments (2)
Please use: https://github.com/Appenlight/appenlight to contribute :) Thanks !