Show More
@@ -1,151 +1,153 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
18 | # AppEnlight Enterprise Edition, including its added features, Support | |
19 | # services, and proprietary license terms, please see |
|
19 | # services, and proprietary license terms, please see | |
20 | # https://rhodecode.com/licenses/ |
|
20 | # https://rhodecode.com/licenses/ | |
21 |
|
21 | |||
22 | from pyramid.view import view_config |
|
22 | from pyramid.view import view_config | |
23 | from appenlight.models import DBSession, Datastores |
|
23 | from appenlight.models import DBSession, Datastores | |
24 | from appenlight.forms import get_partition_deletion_form |
|
24 | from appenlight.forms import get_partition_deletion_form | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 |
|
27 | |||
28 | from zope.sqlalchemy import mark_changed |
|
28 | from zope.sqlalchemy import mark_changed | |
29 | from datetime import datetime |
|
29 | from datetime import datetime | |
30 | import sqlalchemy as sa |
|
30 | import sqlalchemy as sa | |
31 |
|
31 | |||
32 | log = logging.getLogger(__name__) |
|
32 | log = logging.getLogger(__name__) | |
33 |
|
33 | |||
34 |
|
34 | |||
def get_partition_stats():
    """Collect partition metadata from Elasticsearch and PostgreSQL.

    Scans ES aliases (names starting with ``rcae``) and PG table names
    (those containing a ``_p_`` segment) and buckets them by date.

    :returns: tuple ``(permanent_partitions, daily_partitions)``; each is a
        dict keyed by an ISO date string mapping to
        ``{'pg': [table names], 'elasticsearch': [index names]}``.
        "Permanent" buckets are monthly (day fixed to 1), "daily" buckets
        carry a full year-month-day date.
    """
    table_query = """
    SELECT table_name
    FROM information_schema.tables
    GROUP BY table_name
    ORDER BY table_name
    """

    permanent_partitions = {}
    daily_partitions = {}

    def is_int(data):
        # True when `data` parses as an integer
        try:
            int(data)
            return True
        except Exception:
            pass
        return False

    def add_key(key, holder):
        # ensure a bucket exists for this date key
        # (fixed: previously tested the closed-over `ix_time` instead of the
        # `key` parameter — callers always passed `ix_time`, so it only
        # worked by accident)
        if key not in holder:
            holder[key] = {'pg': [], 'elasticsearch': []}

    for partition in list(Datastores.es.aliases().keys()):
        if not partition.startswith('rcae'):
            continue
        split_data = partition.split('_')
        permanent = False
        # if we don't have a day component then treat it as a permanent
        # (monthly) partition: last two segments are year and month
        if False in list(map(is_int, split_data[-3:])):
            ix_time = datetime(year=int(split_data[-2]),
                               month=int(split_data[-1]),
                               day=1).date()
            permanent = True
        else:
            ix_time = datetime(year=int(split_data[-3]),
                               month=int(split_data[-2]),
                               day=int(split_data[-1])).date()

        ix_time = str(ix_time)
        if permanent:
            add_key(ix_time, permanent_partitions)
            permanent_partitions[ix_time]['elasticsearch'].append(partition)
        else:
            add_key(ix_time, daily_partitions)
            daily_partitions[ix_time]['elasticsearch'].append(partition)

    for row in DBSession.execute(table_query):
        splitted = row['table_name'].split('_')
        if 'p' in splitted:
            # dealing with a partition table: segments after 'p' encode the
            # date — 3 values mean a daily partition, 2 mean a monthly one
            split_data = [int(x) for x in splitted[splitted.index('p') + 1:]]
            if len(split_data) == 3:
                ix_time = datetime(split_data[0], split_data[1],
                                   split_data[2]).date()
                ix_time = str(ix_time)
                add_key(ix_time, daily_partitions)
                daily_partitions[ix_time]['pg'].append(row['table_name'])
            else:
                ix_time = datetime(split_data[0], split_data[1], 1).date()
                ix_time = str(ix_time)
                add_key(ix_time, permanent_partitions)
                permanent_partitions[ix_time]['pg'].append(row['table_name'])

    return permanent_partitions, daily_partitions
104 |
|
104 | |||
105 |
|
105 | |||
@view_config(route_name='section_view', permission='root_administration',
             match_param=['section=admin_section', 'view=partitions'],
             renderer='json', request_method='GET')
def index(request):
    """Admin JSON view listing permanent and daily partitions, newest first."""
    permanent, daily = get_partition_stats()

    def newest_first(partition_map):
        # (date_key, bucket) pairs ordered by date string, descending
        return sorted(partition_map.items(), key=lambda kv: kv[0],
                      reverse=True)

    return {"permanent_partitions": newest_first(permanent),
            "daily_partitions": newest_first(daily)}
116 |
|
116 | |||
117 |
|
117 | |||
@view_config(route_name='section_view', request_method='POST',
             match_param=['section=admin_section', 'view=partitions_remove'],
             renderer='json', permission='root_administration')
def partitions_remove(request):
    """Delete the selected ES indices and PG partition tables.

    Validates the POSTed index/table names against the set of currently
    known partitions (plus CSRF) before deleting anything, flashes form
    errors on failure, and returns a freshly computed partition listing
    in the same shape as :func:`index`.
    """
    permanent_partitions, daily_partitions = get_partition_stats()
    pg_partitions = []
    es_partitions = []
    for item in list(permanent_partitions.values()) + list(daily_partitions.values()):
        es_partitions.extend(item['elasticsearch'])
        pg_partitions.extend(item['pg'])
    # the form only accepts names drawn from the currently known partition
    # lists, which whitelists the DROP TABLE identifiers used below
    FormCls = get_partition_deletion_form(es_partitions, pg_partitions)
    form = FormCls(es_index=request.unsafe_json_body['es_indices'],
                   pg_index=request.unsafe_json_body['pg_indices'],
                   confirm=request.unsafe_json_body['confirm'],
                   csrf_context=request)
    if form.validate():
        for ix in form.data['es_index']:
            log.warning('deleting ES partition: {}'.format(ix))
            Datastores.es.delete_index(ix)
        for ix in form.data['pg_index']:
            log.warning('deleting PG partition: {}'.format(ix))
            # table names cannot be bound as query parameters; `ix` was
            # validated against the whitelist above, so interpolation is
            # safe here (previously the name was wrapped in a second
            # sa.text() that was immediately stringified — a no-op that
            # falsely suggested quoting/escaping)
            stmt = sa.text('DROP TABLE %s CASCADE' % ix)
            session = DBSession()
            session.connection().execute(stmt)
            mark_changed(session)
    else:
        # surface each validation error to the admin UI
        for field, error in form.errors.items():
            msg = '%s: %s' % (field, error[0])
            request.session.flash(msg, 'error')

    # re-read stats so the response reflects the deletions just performed
    permanent_partitions, daily_partitions = get_partition_stats()
    return {
        "permanent_partitions": sorted(
            list(permanent_partitions.items()), key=lambda x: x[0], reverse=True),
        "daily_partitions": sorted(
            list(daily_partitions.items()), key=lambda x: x[0], reverse=True)}
General Comments 0
You need to be logged in to leave comments.
Login now