The requested changes are too big and content was truncated.

NO CONTENT: new file 100644
NO CONTENT: new file 100644 (file too big; content truncated)
@@ -1,57 +1,57 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import sys
import platform

VERSION = tuple(open(os.path.join(
    os.path.dirname(__file__), 'VERSION')).read().split('.'))

BACKENDS = {
    'hg': 'Mercurial repository',
    'git': 'Git repository',
    'svn': 'Subversion repository',
}

CELERY_ENABLED = False
CELERY_EAGER = False

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

__version__ = ('.'.join((str(each) for each in VERSION[:3])))
-__dbversion__ = 9
+__dbversion__ = 98  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
@@ -1,746 +1,747 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import re |
|
21 | import re | |
22 | import logging |
|
22 | import logging | |
23 | import collections |
|
23 | import collections | |
24 |
|
24 | |||
25 | from pyramid.view import view_config |
|
25 | from pyramid.view import view_config | |
26 |
|
26 | |||
27 | from rhodecode.apps._base import BaseAppView |
|
27 | from rhodecode.apps._base import BaseAppView | |
28 | from rhodecode.lib import helpers as h |
|
28 | from rhodecode.lib import helpers as h | |
29 | from rhodecode.lib.auth import ( |
|
29 | from rhodecode.lib.auth import ( | |
30 | LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, |
|
30 | LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, | |
31 | CSRFRequired) |
|
31 | CSRFRequired) | |
32 | from rhodecode.lib.index import searcher_from_config |
|
32 | from rhodecode.lib.index import searcher_from_config | |
33 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
33 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int | |
34 | from rhodecode.lib.ext_json import json |
|
34 | from rhodecode.lib.ext_json import json | |
35 | from rhodecode.model.db import ( |
|
35 | from rhodecode.model.db import ( | |
36 | func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup) |
|
36 | func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup) | |
37 | from rhodecode.model.repo import RepoModel |
|
37 | from rhodecode.model.repo import RepoModel | |
38 | from rhodecode.model.repo_group import RepoGroupModel |
|
38 | from rhodecode.model.repo_group import RepoGroupModel | |
39 | from rhodecode.model.scm import RepoGroupList, RepoList |
|
39 | from rhodecode.model.scm import RepoGroupList, RepoList | |
40 | from rhodecode.model.user import UserModel |
|
40 | from rhodecode.model.user import UserModel | |
41 | from rhodecode.model.user_group import UserGroupModel |
|
41 | from rhodecode.model.user_group import UserGroupModel | |
42 |
|
42 | |||
43 | log = logging.getLogger(__name__) |
|
43 | log = logging.getLogger(__name__) | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | class HomeView(BaseAppView): |
|
46 | class HomeView(BaseAppView): | |
47 |
|
47 | |||
48 | def load_default_context(self): |
|
48 | def load_default_context(self): | |
49 | c = self._get_local_tmpl_context() |
|
49 | c = self._get_local_tmpl_context() | |
50 | c.user = c.auth_user.get_instance() |
|
50 | c.user = c.auth_user.get_instance() | |
51 |
|
51 | |||
52 | return c |
|
52 | return c | |
53 |
|
53 | |||
54 | @LoginRequired() |
|
54 | @LoginRequired() | |
55 | @view_config( |
|
55 | @view_config( | |
56 | route_name='user_autocomplete_data', request_method='GET', |
|
56 | route_name='user_autocomplete_data', request_method='GET', | |
57 | renderer='json_ext', xhr=True) |
|
57 | renderer='json_ext', xhr=True) | |
58 | def user_autocomplete_data(self): |
|
58 | def user_autocomplete_data(self): | |
59 | self.load_default_context() |
|
59 | self.load_default_context() | |
60 | query = self.request.GET.get('query') |
|
60 | query = self.request.GET.get('query') | |
61 | active = str2bool(self.request.GET.get('active') or True) |
|
61 | active = str2bool(self.request.GET.get('active') or True) | |
62 | include_groups = str2bool(self.request.GET.get('user_groups')) |
|
62 | include_groups = str2bool(self.request.GET.get('user_groups')) | |
63 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
63 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) | |
64 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) |
|
64 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) | |
65 |
|
65 | |||
66 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', |
|
66 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', | |
67 | query, active, include_groups) |
|
67 | query, active, include_groups) | |
68 |
|
68 | |||
69 | _users = UserModel().get_users( |
|
69 | _users = UserModel().get_users( | |
70 | name_contains=query, only_active=active) |
|
70 | name_contains=query, only_active=active) | |
71 |
|
71 | |||
72 | def maybe_skip_default_user(usr): |
|
72 | def maybe_skip_default_user(usr): | |
73 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
73 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: | |
74 | return False |
|
74 | return False | |
75 | return True |
|
75 | return True | |
76 | _users = filter(maybe_skip_default_user, _users) |
|
76 | _users = filter(maybe_skip_default_user, _users) | |
77 |
|
77 | |||
78 | if include_groups: |
|
78 | if include_groups: | |
79 | # extend with user groups |
|
79 | # extend with user groups | |
80 | _user_groups = UserGroupModel().get_user_groups( |
|
80 | _user_groups = UserGroupModel().get_user_groups( | |
81 | name_contains=query, only_active=active, |
|
81 | name_contains=query, only_active=active, | |
82 | expand_groups=expand_groups) |
|
82 | expand_groups=expand_groups) | |
83 | _users = _users + _user_groups |
|
83 | _users = _users + _user_groups | |
84 |
|
84 | |||
85 | return {'suggestions': _users} |
|
85 | return {'suggestions': _users} | |
86 |
|
86 | |||
87 | @LoginRequired() |
|
87 | @LoginRequired() | |
88 | @NotAnonymous() |
|
88 | @NotAnonymous() | |
89 | @view_config( |
|
89 | @view_config( | |
90 | route_name='user_group_autocomplete_data', request_method='GET', |
|
90 | route_name='user_group_autocomplete_data', request_method='GET', | |
91 | renderer='json_ext', xhr=True) |
|
91 | renderer='json_ext', xhr=True) | |
92 | def user_group_autocomplete_data(self): |
|
92 | def user_group_autocomplete_data(self): | |
93 | self.load_default_context() |
|
93 | self.load_default_context() | |
94 | query = self.request.GET.get('query') |
|
94 | query = self.request.GET.get('query') | |
95 | active = str2bool(self.request.GET.get('active') or True) |
|
95 | active = str2bool(self.request.GET.get('active') or True) | |
96 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
96 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) | |
97 |
|
97 | |||
98 | log.debug('generating user group list, query:%s, active:%s', |
|
98 | log.debug('generating user group list, query:%s, active:%s', | |
99 | query, active) |
|
99 | query, active) | |
100 |
|
100 | |||
101 | _user_groups = UserGroupModel().get_user_groups( |
|
101 | _user_groups = UserGroupModel().get_user_groups( | |
102 | name_contains=query, only_active=active, |
|
102 | name_contains=query, only_active=active, | |
103 | expand_groups=expand_groups) |
|
103 | expand_groups=expand_groups) | |
104 | _user_groups = _user_groups |
|
104 | _user_groups = _user_groups | |
105 |
|
105 | |||
106 | return {'suggestions': _user_groups} |
|
106 | return {'suggestions': _user_groups} | |
107 |
|
107 | |||
108 | def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20): |
|
108 | def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20): | |
109 | org_query = name_contains |
|
109 | org_query = name_contains | |
110 | allowed_ids = self._rhodecode_user.repo_acl_ids( |
|
110 | allowed_ids = self._rhodecode_user.repo_acl_ids( | |
111 | ['repository.read', 'repository.write', 'repository.admin'], |
|
111 | ['repository.read', 'repository.write', 'repository.admin'], | |
112 | cache=False, name_filter=name_contains) or [-1] |
|
112 | cache=False, name_filter=name_contains) or [-1] | |
113 |
|
113 | |||
114 | query = Repository.query()\ |
|
114 | query = Repository.query()\ | |
115 | .filter(Repository.archived.isnot(true()))\ |
|
115 | .filter(Repository.archived.isnot(true()))\ | |
116 | .filter(or_( |
|
116 | .filter(or_( | |
117 | # generate multiple IN to fix limitation problems |
|
117 | # generate multiple IN to fix limitation problems | |
118 | *in_filter_generator(Repository.repo_id, allowed_ids) |
|
118 | *in_filter_generator(Repository.repo_id, allowed_ids) | |
119 | )) |
|
119 | )) | |
120 |
|
120 | |||
121 | query = query.order_by(case( |
|
121 | query = query.order_by(case( | |
122 | [ |
|
122 | [ | |
123 | (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'), |
|
123 | (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'), | |
124 | ], |
|
124 | ], | |
125 | )) |
|
125 | )) | |
126 | query = query.order_by(func.length(Repository.repo_name)) |
|
126 | query = query.order_by(func.length(Repository.repo_name)) | |
127 | query = query.order_by(Repository.repo_name) |
|
127 | query = query.order_by(Repository.repo_name) | |
128 |
|
128 | |||
129 | if repo_type: |
|
129 | if repo_type: | |
130 | query = query.filter(Repository.repo_type == repo_type) |
|
130 | query = query.filter(Repository.repo_type == repo_type) | |
131 |
|
131 | |||
132 | if name_contains: |
|
132 | if name_contains: | |
133 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
133 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
134 | query = query.filter( |
|
134 | query = query.filter( | |
135 | Repository.repo_name.ilike(ilike_expression)) |
|
135 | Repository.repo_name.ilike(ilike_expression)) | |
136 | query = query.limit(limit) |
|
136 | query = query.limit(limit) | |
137 |
|
137 | |||
138 | acl_iter = query |
|
138 | acl_iter = query | |
139 |
|
139 | |||
140 | return [ |
|
140 | return [ | |
141 | { |
|
141 | { | |
142 | 'id': obj.repo_name, |
|
142 | 'id': obj.repo_name, | |
143 | 'value': org_query, |
|
143 | 'value': org_query, | |
144 | 'value_display': obj.repo_name, |
|
144 | 'value_display': obj.repo_name, | |
145 | 'text': obj.repo_name, |
|
145 | 'text': obj.repo_name, | |
146 | 'type': 'repo', |
|
146 | 'type': 'repo', | |
147 | 'repo_id': obj.repo_id, |
|
147 | 'repo_id': obj.repo_id, | |
148 | 'repo_type': obj.repo_type, |
|
148 | 'repo_type': obj.repo_type, | |
149 | 'private': obj.private, |
|
149 | 'private': obj.private, | |
150 | 'url': h.route_path('repo_summary', repo_name=obj.repo_name) |
|
150 | 'url': h.route_path('repo_summary', repo_name=obj.repo_name) | |
151 | } |
|
151 | } | |
152 | for obj in acl_iter] |
|
152 | for obj in acl_iter] | |
153 |
|
153 | |||
154 | def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20): |
|
154 | def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20): | |
155 | org_query = name_contains |
|
155 | org_query = name_contains | |
156 | allowed_ids = self._rhodecode_user.repo_group_acl_ids( |
|
156 | allowed_ids = self._rhodecode_user.repo_group_acl_ids( | |
157 | ['group.read', 'group.write', 'group.admin'], |
|
157 | ['group.read', 'group.write', 'group.admin'], | |
158 | cache=False, name_filter=name_contains) or [-1] |
|
158 | cache=False, name_filter=name_contains) or [-1] | |
159 |
|
159 | |||
160 | query = RepoGroup.query()\ |
|
160 | query = RepoGroup.query()\ | |
161 | .filter(or_( |
|
161 | .filter(or_( | |
162 | # generate multiple IN to fix limitation problems |
|
162 | # generate multiple IN to fix limitation problems | |
163 | *in_filter_generator(RepoGroup.group_id, allowed_ids) |
|
163 | *in_filter_generator(RepoGroup.group_id, allowed_ids) | |
164 | )) |
|
164 | )) | |
165 |
|
165 | |||
166 | query = query.order_by(case( |
|
166 | query = query.order_by(case( | |
167 | [ |
|
167 | [ | |
168 | (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'), |
|
168 | (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'), | |
169 | ], |
|
169 | ], | |
170 | )) |
|
170 | )) | |
171 | query = query.order_by(func.length(RepoGroup.group_name)) |
|
171 | query = query.order_by(func.length(RepoGroup.group_name)) | |
172 | query = query.order_by(RepoGroup.group_name) |
|
172 | query = query.order_by(RepoGroup.group_name) | |
173 |
|
173 | |||
174 | if name_contains: |
|
174 | if name_contains: | |
175 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
175 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
176 | query = query.filter( |
|
176 | query = query.filter( | |
177 | RepoGroup.group_name.ilike(ilike_expression)) |
|
177 | RepoGroup.group_name.ilike(ilike_expression)) | |
178 | query = query.limit(limit) |
|
178 | query = query.limit(limit) | |
179 |
|
179 | |||
180 | acl_iter = query |
|
180 | acl_iter = query | |
181 |
|
181 | |||
182 | return [ |
|
182 | return [ | |
183 | { |
|
183 | { | |
184 | 'id': obj.group_name, |
|
184 | 'id': obj.group_name, | |
185 | 'value': org_query, |
|
185 | 'value': org_query, | |
186 | 'value_display': obj.group_name, |
|
186 | 'value_display': obj.group_name, | |
187 | 'text': obj.group_name, |
|
187 | 'text': obj.group_name, | |
188 | 'type': 'repo_group', |
|
188 | 'type': 'repo_group', | |
189 | 'repo_group_id': obj.group_id, |
|
189 | 'repo_group_id': obj.group_id, | |
190 | 'url': h.route_path( |
|
190 | 'url': h.route_path( | |
191 | 'repo_group_home', repo_group_name=obj.group_name) |
|
191 | 'repo_group_home', repo_group_name=obj.group_name) | |
192 | } |
|
192 | } | |
193 | for obj in acl_iter] |
|
193 | for obj in acl_iter] | |
194 |
|
194 | |||
195 | def _get_user_list(self, name_contains=None, limit=20): |
|
195 | def _get_user_list(self, name_contains=None, limit=20): | |
196 | org_query = name_contains |
|
196 | org_query = name_contains | |
197 | if not name_contains: |
|
197 | if not name_contains: | |
198 | return [], False |
|
198 | return [], False | |
199 |
|
199 | |||
200 | # TODO(marcink): should all logged in users be allowed to search others? |
|
200 | # TODO(marcink): should all logged in users be allowed to search others? | |
201 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
201 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER | |
202 | if not allowed_user_search: |
|
202 | if not allowed_user_search: | |
203 | return [], False |
|
203 | return [], False | |
204 |
|
204 | |||
205 | name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains) |
|
205 | name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains) | |
206 | if len(name_contains) != 1: |
|
206 | if len(name_contains) != 1: | |
207 | return [], False |
|
207 | return [], False | |
208 |
|
208 | |||
209 | name_contains = name_contains[0] |
|
209 | name_contains = name_contains[0] | |
210 |
|
210 | |||
211 | query = User.query()\ |
|
211 | query = User.query()\ | |
212 | .order_by(func.length(User.username))\ |
|
212 | .order_by(func.length(User.username))\ | |
213 | .order_by(User.username) \ |
|
213 | .order_by(User.username) \ | |
214 | .filter(User.username != User.DEFAULT_USER) |
|
214 | .filter(User.username != User.DEFAULT_USER) | |
215 |
|
215 | |||
216 | if name_contains: |
|
216 | if name_contains: | |
217 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
217 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
218 | query = query.filter( |
|
218 | query = query.filter( | |
219 | User.username.ilike(ilike_expression)) |
|
219 | User.username.ilike(ilike_expression)) | |
220 | query = query.limit(limit) |
|
220 | query = query.limit(limit) | |
221 |
|
221 | |||
222 | acl_iter = query |
|
222 | acl_iter = query | |
223 |
|
223 | |||
224 | return [ |
|
224 | return [ | |
225 | { |
|
225 | { | |
226 | 'id': obj.user_id, |
|
226 | 'id': obj.user_id, | |
227 | 'value': org_query, |
|
227 | 'value': org_query, | |
228 | 'value_display': 'user: `{}`'.format(obj.username), |
|
228 | 'value_display': 'user: `{}`'.format(obj.username), | |
229 | 'type': 'user', |
|
229 | 'type': 'user', | |
230 | 'icon_link': h.gravatar_url(obj.email, 30), |
|
230 | 'icon_link': h.gravatar_url(obj.email, 30), | |
231 | 'url': h.route_path( |
|
231 | 'url': h.route_path( | |
232 | 'user_profile', username=obj.username) |
|
232 | 'user_profile', username=obj.username) | |
233 | } |
|
233 | } | |
234 | for obj in acl_iter], True |
|
234 | for obj in acl_iter], True | |
235 |
|
235 | |||
236 | def _get_user_groups_list(self, name_contains=None, limit=20): |
|
236 | def _get_user_groups_list(self, name_contains=None, limit=20): | |
237 | org_query = name_contains |
|
237 | org_query = name_contains | |
238 | if not name_contains: |
|
238 | if not name_contains: | |
239 | return [], False |
|
239 | return [], False | |
240 |
|
240 | |||
241 | # TODO(marcink): should all logged in users be allowed to search others? |
|
241 | # TODO(marcink): should all logged in users be allowed to search others? | |
242 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
242 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER | |
243 | if not allowed_user_search: |
|
243 | if not allowed_user_search: | |
244 | return [], False |
|
244 | return [], False | |
245 |
|
245 | |||
246 | name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains) |
|
246 | name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains) | |
247 | if len(name_contains) != 1: |
|
247 | if len(name_contains) != 1: | |
248 | return [], False |
|
248 | return [], False | |
249 |
|
249 | |||
250 | name_contains = name_contains[0] |
|
250 | name_contains = name_contains[0] | |
251 |
|
251 | |||
252 | query = UserGroup.query()\ |
|
252 | query = UserGroup.query()\ | |
253 | .order_by(func.length(UserGroup.users_group_name))\ |
|
253 | .order_by(func.length(UserGroup.users_group_name))\ | |
254 | .order_by(UserGroup.users_group_name) |
|
254 | .order_by(UserGroup.users_group_name) | |
255 |
|
255 | |||
256 | if name_contains: |
|
256 | if name_contains: | |
257 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
257 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
258 | query = query.filter( |
|
258 | query = query.filter( | |
259 | UserGroup.users_group_name.ilike(ilike_expression)) |
|
259 | UserGroup.users_group_name.ilike(ilike_expression)) | |
260 | query = query.limit(limit) |
|
260 | query = query.limit(limit) | |
261 |
|
261 | |||
262 | acl_iter = query |
|
262 | acl_iter = query | |
263 |
|
263 | |||
264 | return [ |
|
264 | return [ | |
265 | { |
|
265 | { | |
266 | 'id': obj.users_group_id, |
|
266 | 'id': obj.users_group_id, | |
267 | 'value': org_query, |
|
267 | 'value': org_query, | |
268 | 'value_display': 'user_group: `{}`'.format(obj.users_group_name), |
|
268 | 'value_display': 'user_group: `{}`'.format(obj.users_group_name), | |
269 | 'type': 'user_group', |
|
269 | 'type': 'user_group', | |
270 | 'url': h.route_path( |
|
270 | 'url': h.route_path( | |
271 | 'user_group_profile', user_group_name=obj.users_group_name) |
|
271 | 'user_group_profile', user_group_name=obj.users_group_name) | |
272 | } |
|
272 | } | |
273 | for obj in acl_iter], True |
|
273 | for obj in acl_iter], True | |
274 |
|
274 | |||
275 | def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None): |
|
275 | def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None): | |
276 | repo_name = repo_group_name = None |
|
276 | repo_name = repo_group_name = None | |
277 | if repo: |
|
277 | if repo: | |
278 | repo_name = repo.repo_name |
|
278 | repo_name = repo.repo_name | |
279 | if repo_group: |
|
279 | if repo_group: | |
280 | repo_group_name = repo_group.group_name |
|
280 | repo_group_name = repo_group.group_name | |
281 |
|
281 | |||
282 | org_query = query |
|
282 | org_query = query | |
283 | if not query or len(query) < 3 or not searcher: |
|
283 | if not query or len(query) < 3 or not searcher: | |
284 | return [], False |
|
284 | return [], False | |
285 |
|
285 | |||
286 | commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query) |
|
286 | commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query) | |
287 |
|
287 | |||
288 | if len(commit_hashes) != 1: |
|
288 | if len(commit_hashes) != 1: | |
289 | return [], False |
|
289 | return [], False | |
290 |
|
290 | |||
291 | commit_hash = commit_hashes[0] |
|
291 | commit_hash = commit_hashes[0] | |
292 |
|
292 | |||
293 | result = searcher.search( |
|
293 | result = searcher.search( | |
294 | 'commit_id:{}*'.format(commit_hash), 'commit', auth_user, |
|
294 | 'commit_id:{}*'.format(commit_hash), 'commit', auth_user, | |
295 | repo_name, repo_group_name, raise_on_exc=False) |
|
295 | repo_name, repo_group_name, raise_on_exc=False) | |
296 |
|
296 | |||
297 | commits = [] |
|
297 | commits = [] | |
298 | for entry in result['results']: |
|
298 | for entry in result['results']: | |
299 | repo_data = { |
|
299 | repo_data = { | |
300 | 'repository_id': entry.get('repository_id'), |
|
300 | 'repository_id': entry.get('repository_id'), | |
301 | 'repository_type': entry.get('repo_type'), |
|
301 | 'repository_type': entry.get('repo_type'), | |
302 | 'repository_name': entry.get('repository'), |
|
302 | 'repository_name': entry.get('repository'), | |
303 | } |
|
303 | } | |
304 |
|
304 | |||
305 | commit_entry = { |
|
305 | commit_entry = { | |
306 | 'id': entry['commit_id'], |
|
306 | 'id': entry['commit_id'], | |
307 | 'value': org_query, |
|
307 | 'value': org_query, | |
308 | 'value_display': '`{}` commit: {}'.format( |
|
308 | 'value_display': '`{}` commit: {}'.format( | |
309 | entry['repository'], entry['commit_id']), |
|
309 | entry['repository'], entry['commit_id']), | |
310 | 'type': 'commit', |
|
310 | 'type': 'commit', | |
311 | 'repo': entry['repository'], |
|
311 | 'repo': entry['repository'], | |
312 | 'repo_data': repo_data, |
|
312 | 'repo_data': repo_data, | |
313 |
|
313 | |||
314 | 'url': h.route_path( |
|
314 | 'url': h.route_path( | |
315 | 'repo_commit', |
|
315 | 'repo_commit', | |
316 | repo_name=entry['repository'], commit_id=entry['commit_id']) |
|
316 | repo_name=entry['repository'], commit_id=entry['commit_id']) | |
317 | } |
|
317 | } | |
318 |
|
318 | |||
319 | commits.append(commit_entry) |
|
319 | commits.append(commit_entry) | |
320 | return commits, True |
|
320 | return commits, True | |
321 |
|
321 | |||
322 | def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None): |
|
322 | def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None): | |
323 | repo_name = repo_group_name = None |
|
323 | repo_name = repo_group_name = None | |
324 | if repo: |
|
324 | if repo: | |
325 | repo_name = repo.repo_name |
|
325 | repo_name = repo.repo_name | |
326 | if repo_group: |
|
326 | if repo_group: | |
327 | repo_group_name = repo_group.group_name |
|
327 | repo_group_name = repo_group.group_name | |
328 |
|
328 | |||
329 | org_query = query |
|
329 | org_query = query | |
330 | if not query or len(query) < 3 or not searcher: |
|
330 | if not query or len(query) < 3 or not searcher: | |
331 | return [], False |
|
331 | return [], False | |
332 |
|
332 | |||
333 | paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query) |
|
333 | paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query) | |
334 | if len(paths_re) != 1: |
|
334 | if len(paths_re) != 1: | |
335 | return [], False |
|
335 | return [], False | |
336 |
|
336 | |||
337 | file_path = paths_re[0] |
|
337 | file_path = paths_re[0] | |
338 |
|
338 | |||
339 | search_path = searcher.escape_specials(file_path) |
|
339 | search_path = searcher.escape_specials(file_path) | |
340 | result = searcher.search( |
|
340 | result = searcher.search( | |
341 | 'file.raw:*{}*'.format(search_path), 'path', auth_user, |
|
341 | 'file.raw:*{}*'.format(search_path), 'path', auth_user, | |
342 | repo_name, repo_group_name, raise_on_exc=False) |
|
342 | repo_name, repo_group_name, raise_on_exc=False) | |
343 |
|
343 | |||
344 | files = [] |
|
344 | files = [] | |
345 | for entry in result['results']: |
|
345 | for entry in result['results']: | |
346 | repo_data = { |
|
346 | repo_data = { | |
347 | 'repository_id': entry.get('repository_id'), |
|
347 | 'repository_id': entry.get('repository_id'), | |
348 | 'repository_type': entry.get('repo_type'), |
|
348 | 'repository_type': entry.get('repo_type'), | |
349 | 'repository_name': entry.get('repository'), |
|
349 | 'repository_name': entry.get('repository'), | |
350 | } |
|
350 | } | |
351 |
|
351 | |||
352 | file_entry = { |
|
352 | file_entry = { | |
353 | 'id': entry['commit_id'], |
|
353 | 'id': entry['commit_id'], | |
354 | 'value': org_query, |
|
354 | 'value': org_query, | |
355 | 'value_display': '`{}` file: {}'.format( |
|
355 | 'value_display': '`{}` file: {}'.format( | |
356 | entry['repository'], entry['file']), |
|
356 | entry['repository'], entry['file']), | |
357 | 'type': 'file', |
|
357 | 'type': 'file', | |
358 | 'repo': entry['repository'], |
|
358 | 'repo': entry['repository'], | |
359 | 'repo_data': repo_data, |
|
359 | 'repo_data': repo_data, | |
360 |
|
360 | |||
361 | 'url': h.route_path( |
|
361 | 'url': h.route_path( | |
362 | 'repo_files', |
|
362 | 'repo_files', | |
363 | repo_name=entry['repository'], commit_id=entry['commit_id'], |
|
363 | repo_name=entry['repository'], commit_id=entry['commit_id'], | |
364 | f_path=entry['file']) |
|
364 | f_path=entry['file']) | |
365 | } |
|
365 | } | |
366 |
|
366 | |||
367 | files.append(file_entry) |
|
367 | files.append(file_entry) | |
368 | return files, True |
|
368 | return files, True | |
369 |
|
369 | |||
370 | @LoginRequired() |
|
370 | @LoginRequired() | |
371 | @view_config( |
|
371 | @view_config( | |
372 | route_name='repo_list_data', request_method='GET', |
|
372 | route_name='repo_list_data', request_method='GET', | |
373 | renderer='json_ext', xhr=True) |
|
373 | renderer='json_ext', xhr=True) | |
374 | def repo_list_data(self): |
|
374 | def repo_list_data(self): | |
375 | _ = self.request.translate |
|
375 | _ = self.request.translate | |
376 | self.load_default_context() |
|
376 | self.load_default_context() | |
377 |
|
377 | |||
378 | query = self.request.GET.get('query') |
|
378 | query = self.request.GET.get('query') | |
379 | repo_type = self.request.GET.get('repo_type') |
|
379 | repo_type = self.request.GET.get('repo_type') | |
380 | log.debug('generating repo list, query:%s, repo_type:%s', |
|
380 | log.debug('generating repo list, query:%s, repo_type:%s', | |
381 | query, repo_type) |
|
381 | query, repo_type) | |
382 |
|
382 | |||
383 | res = [] |
|
383 | res = [] | |
384 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
384 | repos = self._get_repo_list(query, repo_type=repo_type) | |
385 | if repos: |
|
385 | if repos: | |
386 | res.append({ |
|
386 | res.append({ | |
387 | 'text': _('Repositories'), |
|
387 | 'text': _('Repositories'), | |
388 | 'children': repos |
|
388 | 'children': repos | |
389 | }) |
|
389 | }) | |
390 |
|
390 | |||
391 | data = { |
|
391 | data = { | |
392 | 'more': False, |
|
392 | 'more': False, | |
393 | 'results': res |
|
393 | 'results': res | |
394 | } |
|
394 | } | |
395 | return data |
|
395 | return data | |
396 |
|
396 | |||
397 | @LoginRequired() |
|
397 | @LoginRequired() | |
398 | @view_config( |
|
398 | @view_config( | |
399 | route_name='repo_group_list_data', request_method='GET', |
|
399 | route_name='repo_group_list_data', request_method='GET', | |
400 | renderer='json_ext', xhr=True) |
|
400 | renderer='json_ext', xhr=True) | |
401 | def repo_group_list_data(self): |
|
401 | def repo_group_list_data(self): | |
402 | _ = self.request.translate |
|
402 | _ = self.request.translate | |
403 | self.load_default_context() |
|
403 | self.load_default_context() | |
404 |
|
404 | |||
405 | query = self.request.GET.get('query') |
|
405 | query = self.request.GET.get('query') | |
406 |
|
406 | |||
407 | log.debug('generating repo group list, query:%s', |
|
407 | log.debug('generating repo group list, query:%s', | |
408 | query) |
|
408 | query) | |
409 |
|
409 | |||
410 | res = [] |
|
410 | res = [] | |
411 | repo_groups = self._get_repo_group_list(query) |
|
411 | repo_groups = self._get_repo_group_list(query) | |
412 | if repo_groups: |
|
412 | if repo_groups: | |
413 | res.append({ |
|
413 | res.append({ | |
414 | 'text': _('Repository Groups'), |
|
414 | 'text': _('Repository Groups'), | |
415 | 'children': repo_groups |
|
415 | 'children': repo_groups | |
416 | }) |
|
416 | }) | |
417 |
|
417 | |||
418 | data = { |
|
418 | data = { | |
419 | 'more': False, |
|
419 | 'more': False, | |
420 | 'results': res |
|
420 | 'results': res | |
421 | } |
|
421 | } | |
422 | return data |
|
422 | return data | |
423 |
|
423 | |||
424 | def _get_default_search_queries(self, search_context, searcher, query): |
|
424 | def _get_default_search_queries(self, search_context, searcher, query): | |
425 | if not searcher: |
|
425 | if not searcher: | |
426 | return [] |
|
426 | return [] | |
427 |
|
427 | |||
428 | is_es_6 = searcher.is_es_6 |
|
428 | is_es_6 = searcher.is_es_6 | |
429 |
|
429 | |||
430 | queries = [] |
|
430 | queries = [] | |
431 | repo_group_name, repo_name, repo_context = None, None, None |
|
431 | repo_group_name, repo_name, repo_context = None, None, None | |
432 |
|
432 | |||
433 | # repo group context |
|
433 | # repo group context | |
434 | if search_context.get('search_context[repo_group_name]'): |
|
434 | if search_context.get('search_context[repo_group_name]'): | |
435 | repo_group_name = search_context.get('search_context[repo_group_name]') |
|
435 | repo_group_name = search_context.get('search_context[repo_group_name]') | |
436 | if search_context.get('search_context[repo_name]'): |
|
436 | if search_context.get('search_context[repo_name]'): | |
437 | repo_name = search_context.get('search_context[repo_name]') |
|
437 | repo_name = search_context.get('search_context[repo_name]') | |
438 | repo_context = search_context.get('search_context[repo_view_type]') |
|
438 | repo_context = search_context.get('search_context[repo_view_type]') | |
439 |
|
439 | |||
440 | if is_es_6 and repo_name: |
|
440 | if is_es_6 and repo_name: | |
441 | # files |
|
441 | # files | |
442 | def query_modifier(): |
|
442 | def query_modifier(): | |
443 | qry = query |
|
443 | qry = query | |
444 | return {'q': qry, 'type': 'content'} |
|
444 | return {'q': qry, 'type': 'content'} | |
445 | label = u'File search for `{}` in this repository.'.format(query) |
|
445 | label = u'File search for `{}` in this repository.'.format(query) | |
446 | file_qry = { |
|
446 | file_qry = { | |
447 | 'id': -10, |
|
447 | 'id': -10, | |
448 | 'value': query, |
|
448 | 'value': query, | |
449 | 'value_display': label, |
|
449 | 'value_display': label, | |
450 | 'type': 'search', |
|
450 | 'type': 'search', | |
451 | 'url': h.route_path('search_repo', |
|
451 | 'url': h.route_path('search_repo', | |
452 | repo_name=repo_name, |
|
452 | repo_name=repo_name, | |
453 | _query=query_modifier()) |
|
453 | _query=query_modifier()) | |
454 | } |
|
454 | } | |
455 |
|
455 | |||
456 | # commits |
|
456 | # commits | |
457 | def query_modifier(): |
|
457 | def query_modifier(): | |
458 | qry = query |
|
458 | qry = query | |
459 | return {'q': qry, 'type': 'commit'} |
|
459 | return {'q': qry, 'type': 'commit'} | |
460 |
|
460 | |||
461 | label = u'Commit search for `{}` in this repository.'.format(query) |
|
461 | label = u'Commit search for `{}` in this repository.'.format(query) | |
462 | commit_qry = { |
|
462 | commit_qry = { | |
463 | 'id': -20, |
|
463 | 'id': -20, | |
464 | 'value': query, |
|
464 | 'value': query, | |
465 | 'value_display': label, |
|
465 | 'value_display': label, | |
466 | 'type': 'search', |
|
466 | 'type': 'search', | |
467 | 'url': h.route_path('search_repo', |
|
467 | 'url': h.route_path('search_repo', | |
468 | repo_name=repo_name, |
|
468 | repo_name=repo_name, | |
469 | _query=query_modifier()) |
|
469 | _query=query_modifier()) | |
470 | } |
|
470 | } | |
471 |
|
471 | |||
472 | if repo_context in ['commit', 'changelog']: |
|
472 | if repo_context in ['commit', 'changelog']: | |
473 | queries.extend([commit_qry, file_qry]) |
|
473 | queries.extend([commit_qry, file_qry]) | |
474 | elif repo_context in ['files', 'summary']: |
|
474 | elif repo_context in ['files', 'summary']: | |
475 | queries.extend([file_qry, commit_qry]) |
|
475 | queries.extend([file_qry, commit_qry]) | |
476 | else: |
|
476 | else: | |
477 | queries.extend([commit_qry, file_qry]) |
|
477 | queries.extend([commit_qry, file_qry]) | |
478 |
|
478 | |||
479 | elif is_es_6 and repo_group_name: |
|
479 | elif is_es_6 and repo_group_name: | |
480 | # files |
|
480 | # files | |
481 | def query_modifier(): |
|
481 | def query_modifier(): | |
482 | qry = query |
|
482 | qry = query | |
483 | return {'q': qry, 'type': 'content'} |
|
483 | return {'q': qry, 'type': 'content'} | |
484 |
|
484 | |||
485 | label = u'File search for `{}` in this repository group'.format(query) |
|
485 | label = u'File search for `{}` in this repository group'.format(query) | |
486 | file_qry = { |
|
486 | file_qry = { | |
487 | 'id': -30, |
|
487 | 'id': -30, | |
488 | 'value': query, |
|
488 | 'value': query, | |
489 | 'value_display': label, |
|
489 | 'value_display': label, | |
490 | 'type': 'search', |
|
490 | 'type': 'search', | |
491 | 'url': h.route_path('search_repo_group', |
|
491 | 'url': h.route_path('search_repo_group', | |
492 | repo_group_name=repo_group_name, |
|
492 | repo_group_name=repo_group_name, | |
493 | _query=query_modifier()) |
|
493 | _query=query_modifier()) | |
494 | } |
|
494 | } | |
495 |
|
495 | |||
496 | # commits |
|
496 | # commits | |
497 | def query_modifier(): |
|
497 | def query_modifier(): | |
498 | qry = query |
|
498 | qry = query | |
499 | return {'q': qry, 'type': 'commit'} |
|
499 | return {'q': qry, 'type': 'commit'} | |
500 |
|
500 | |||
501 | label = u'Commit search for `{}` in this repository group'.format(query) |
|
501 | label = u'Commit search for `{}` in this repository group'.format(query) | |
502 | commit_qry = { |
|
502 | commit_qry = { | |
503 | 'id': -40, |
|
503 | 'id': -40, | |
504 | 'value': query, |
|
504 | 'value': query, | |
505 | 'value_display': label, |
|
505 | 'value_display': label, | |
506 | 'type': 'search', |
|
506 | 'type': 'search', | |
507 | 'url': h.route_path('search_repo_group', |
|
507 | 'url': h.route_path('search_repo_group', | |
508 | repo_group_name=repo_group_name, |
|
508 | repo_group_name=repo_group_name, | |
509 | _query=query_modifier()) |
|
509 | _query=query_modifier()) | |
510 | } |
|
510 | } | |
511 |
|
511 | |||
512 | if repo_context in ['commit', 'changelog']: |
|
512 | if repo_context in ['commit', 'changelog']: | |
513 | queries.extend([commit_qry, file_qry]) |
|
513 | queries.extend([commit_qry, file_qry]) | |
514 | elif repo_context in ['files', 'summary']: |
|
514 | elif repo_context in ['files', 'summary']: | |
515 | queries.extend([file_qry, commit_qry]) |
|
515 | queries.extend([file_qry, commit_qry]) | |
516 | else: |
|
516 | else: | |
517 | queries.extend([commit_qry, file_qry]) |
|
517 | queries.extend([commit_qry, file_qry]) | |
518 |
|
518 | |||
519 | # Global, not scoped |
|
519 | # Global, not scoped | |
520 | if not queries: |
|
520 | if not queries: | |
521 | queries.append( |
|
521 | queries.append( | |
522 | { |
|
522 | { | |
523 | 'id': -1, |
|
523 | 'id': -1, | |
524 | 'value': query, |
|
524 | 'value': query, | |
525 | 'value_display': u'File search for: `{}`'.format(query), |
|
525 | 'value_display': u'File search for: `{}`'.format(query), | |
526 | 'type': 'search', |
|
526 | 'type': 'search', | |
527 | 'url': h.route_path('search', |
|
527 | 'url': h.route_path('search', | |
528 | _query={'q': query, 'type': 'content'}) |
|
528 | _query={'q': query, 'type': 'content'}) | |
529 | }) |
|
529 | }) | |
530 | queries.append( |
|
530 | queries.append( | |
531 | { |
|
531 | { | |
532 | 'id': -2, |
|
532 | 'id': -2, | |
533 | 'value': query, |
|
533 | 'value': query, | |
534 | 'value_display': u'Commit search for: `{}`'.format(query), |
|
534 | 'value_display': u'Commit search for: `{}`'.format(query), | |
535 | 'type': 'search', |
|
535 | 'type': 'search', | |
536 | 'url': h.route_path('search', |
|
536 | 'url': h.route_path('search', | |
537 | _query={'q': query, 'type': 'commit'}) |
|
537 | _query={'q': query, 'type': 'commit'}) | |
538 | }) |
|
538 | }) | |
539 |
|
539 | |||
540 | return queries |
|
540 | return queries | |
541 |
|
541 | |||
542 | @LoginRequired() |
|
542 | @LoginRequired() | |
543 | @view_config( |
|
543 | @view_config( | |
544 | route_name='goto_switcher_data', request_method='GET', |
|
544 | route_name='goto_switcher_data', request_method='GET', | |
545 | renderer='json_ext', xhr=True) |
|
545 | renderer='json_ext', xhr=True) | |
546 | def goto_switcher_data(self): |
|
546 | def goto_switcher_data(self): | |
547 | c = self.load_default_context() |
|
547 | c = self.load_default_context() | |
548 |
|
548 | |||
549 | _ = self.request.translate |
|
549 | _ = self.request.translate | |
550 |
|
550 | |||
551 | query = self.request.GET.get('query') |
|
551 | query = self.request.GET.get('query') | |
552 | log.debug('generating main filter data, query %s', query) |
|
552 | log.debug('generating main filter data, query %s', query) | |
553 |
|
553 | |||
554 | res = [] |
|
554 | res = [] | |
555 | if not query: |
|
555 | if not query: | |
556 | return {'suggestions': res} |
|
556 | return {'suggestions': res} | |
557 |
|
557 | |||
558 | def no_match(name): |
|
558 | def no_match(name): | |
559 | return { |
|
559 | return { | |
560 | 'id': -1, |
|
560 | 'id': -1, | |
561 | 'value': "", |
|
561 | 'value': "", | |
562 | 'value_display': name, |
|
562 | 'value_display': name, | |
563 | 'type': 'text', |
|
563 | 'type': 'text', | |
564 | 'url': "" |
|
564 | 'url': "" | |
565 | } |
|
565 | } | |
566 | searcher = searcher_from_config(self.request.registry.settings) |
|
566 | searcher = searcher_from_config(self.request.registry.settings) | |
567 | has_specialized_search = False |
|
567 | has_specialized_search = False | |
568 |
|
568 | |||
569 | # set repo context |
|
569 | # set repo context | |
570 | repo = None |
|
570 | repo = None | |
571 | repo_id = safe_int(self.request.GET.get('search_context[repo_id]')) |
|
571 | repo_id = safe_int(self.request.GET.get('search_context[repo_id]')) | |
572 | if repo_id: |
|
572 | if repo_id: | |
573 | repo = Repository.get(repo_id) |
|
573 | repo = Repository.get(repo_id) | |
574 |
|
574 | |||
575 | # set group context |
|
575 | # set group context | |
576 | repo_group = None |
|
576 | repo_group = None | |
577 | repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]')) |
|
577 | repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]')) | |
578 | if repo_group_id: |
|
578 | if repo_group_id: | |
579 | repo_group = RepoGroup.get(repo_group_id) |
|
579 | repo_group = RepoGroup.get(repo_group_id) | |
580 | prefix_match = False |
|
580 | prefix_match = False | |
581 |
|
581 | |||
582 | # user: type search |
|
582 | # user: type search | |
583 | if not prefix_match: |
|
583 | if not prefix_match: | |
584 | users, prefix_match = self._get_user_list(query) |
|
584 | users, prefix_match = self._get_user_list(query) | |
585 | if users: |
|
585 | if users: | |
586 | has_specialized_search = True |
|
586 | has_specialized_search = True | |
587 | for serialized_user in users: |
|
587 | for serialized_user in users: | |
588 | res.append(serialized_user) |
|
588 | res.append(serialized_user) | |
589 | elif prefix_match: |
|
589 | elif prefix_match: | |
590 | has_specialized_search = True |
|
590 | has_specialized_search = True | |
591 | res.append(no_match('No matching users found')) |
|
591 | res.append(no_match('No matching users found')) | |
592 |
|
592 | |||
593 | # user_group: type search |
|
593 | # user_group: type search | |
594 | if not prefix_match: |
|
594 | if not prefix_match: | |
595 | user_groups, prefix_match = self._get_user_groups_list(query) |
|
595 | user_groups, prefix_match = self._get_user_groups_list(query) | |
596 | if user_groups: |
|
596 | if user_groups: | |
597 | has_specialized_search = True |
|
597 | has_specialized_search = True | |
598 | for serialized_user_group in user_groups: |
|
598 | for serialized_user_group in user_groups: | |
599 | res.append(serialized_user_group) |
|
599 | res.append(serialized_user_group) | |
600 | elif prefix_match: |
|
600 | elif prefix_match: | |
601 | has_specialized_search = True |
|
601 | has_specialized_search = True | |
602 | res.append(no_match('No matching user groups found')) |
|
602 | res.append(no_match('No matching user groups found')) | |
603 |
|
603 | |||
604 | # FTS commit: type search |
|
604 | # FTS commit: type search | |
605 | if not prefix_match: |
|
605 | if not prefix_match: | |
606 | commits, prefix_match = self._get_hash_commit_list( |
|
606 | commits, prefix_match = self._get_hash_commit_list( | |
607 | c.auth_user, searcher, query, repo, repo_group) |
|
607 | c.auth_user, searcher, query, repo, repo_group) | |
608 | if commits: |
|
608 | if commits: | |
609 | has_specialized_search = True |
|
609 | has_specialized_search = True | |
610 | unique_repos = collections.OrderedDict() |
|
610 | unique_repos = collections.OrderedDict() | |
611 | for commit in commits: |
|
611 | for commit in commits: | |
612 | repo_name = commit['repo'] |
|
612 | repo_name = commit['repo'] | |
613 | unique_repos.setdefault(repo_name, []).append(commit) |
|
613 | unique_repos.setdefault(repo_name, []).append(commit) | |
614 |
|
614 | |||
615 | for _repo, commits in unique_repos.items(): |
|
615 | for _repo, commits in unique_repos.items(): | |
616 | for commit in commits: |
|
616 | for commit in commits: | |
617 | res.append(commit) |
|
617 | res.append(commit) | |
618 | elif prefix_match: |
|
618 | elif prefix_match: | |
619 | has_specialized_search = True |
|
619 | has_specialized_search = True | |
620 | res.append(no_match('No matching commits found')) |
|
620 | res.append(no_match('No matching commits found')) | |
621 |
|
621 | |||
622 | # FTS file: type search |
|
622 | # FTS file: type search | |
623 | if not prefix_match: |
|
623 | if not prefix_match: | |
624 | paths, prefix_match = self._get_path_list( |
|
624 | paths, prefix_match = self._get_path_list( | |
625 | c.auth_user, searcher, query, repo, repo_group) |
|
625 | c.auth_user, searcher, query, repo, repo_group) | |
626 | if paths: |
|
626 | if paths: | |
627 | has_specialized_search = True |
|
627 | has_specialized_search = True | |
628 | unique_repos = collections.OrderedDict() |
|
628 | unique_repos = collections.OrderedDict() | |
629 | for path in paths: |
|
629 | for path in paths: | |
630 | repo_name = path['repo'] |
|
630 | repo_name = path['repo'] | |
631 | unique_repos.setdefault(repo_name, []).append(path) |
|
631 | unique_repos.setdefault(repo_name, []).append(path) | |
632 |
|
632 | |||
633 | for repo, paths in unique_repos.items(): |
|
633 | for repo, paths in unique_repos.items(): | |
634 | for path in paths: |
|
634 | for path in paths: | |
635 | res.append(path) |
|
635 | res.append(path) | |
636 | elif prefix_match: |
|
636 | elif prefix_match: | |
637 | has_specialized_search = True |
|
637 | has_specialized_search = True | |
638 | res.append(no_match('No matching files found')) |
|
638 | res.append(no_match('No matching files found')) | |
639 |
|
639 | |||
640 | # main suggestions |
|
640 | # main suggestions | |
641 | if not has_specialized_search: |
|
641 | if not has_specialized_search: | |
642 | repo_group_name = '' |
|
642 | repo_group_name = '' | |
643 | if repo_group: |
|
643 | if repo_group: | |
644 | repo_group_name = repo_group.group_name |
|
644 | repo_group_name = repo_group.group_name | |
645 |
|
645 | |||
646 | for _q in self._get_default_search_queries(self.request.GET, searcher, query): |
|
646 | for _q in self._get_default_search_queries(self.request.GET, searcher, query): | |
647 | res.append(_q) |
|
647 | res.append(_q) | |
648 |
|
648 | |||
649 | repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name) |
|
649 | repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name) | |
650 | for serialized_repo_group in repo_groups: |
|
650 | for serialized_repo_group in repo_groups: | |
651 | res.append(serialized_repo_group) |
|
651 | res.append(serialized_repo_group) | |
652 |
|
652 | |||
653 | repos = self._get_repo_list(query, repo_group_name=repo_group_name) |
|
653 | repos = self._get_repo_list(query, repo_group_name=repo_group_name) | |
654 | for serialized_repo in repos: |
|
654 | for serialized_repo in repos: | |
655 | res.append(serialized_repo) |
|
655 | res.append(serialized_repo) | |
656 |
|
656 | |||
657 | if not repos and not repo_groups: |
|
657 | if not repos and not repo_groups: | |
658 | res.append(no_match('No matches found')) |
|
658 | res.append(no_match('No matches found')) | |
659 |
|
659 | |||
660 | return {'suggestions': res} |
|
660 | return {'suggestions': res} | |
661 |
|
661 | |||
662 | def _get_groups_and_repos(self, repo_group_id=None): |
|
662 | def _get_groups_and_repos(self, repo_group_id=None): | |
663 | # repo groups groups |
|
663 | # repo groups groups | |
664 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
664 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) | |
665 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
665 | _perms = ['group.read', 'group.write', 'group.admin'] | |
666 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
666 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) | |
667 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
667 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( | |
668 | repo_group_list=repo_group_list_acl, admin=False) |
|
668 | repo_group_list=repo_group_list_acl, admin=False) | |
669 |
|
669 | |||
670 | # repositories |
|
670 | # repositories | |
671 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
671 | repo_list = Repository.get_all_repos(group_id=repo_group_id) | |
672 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
672 | _perms = ['repository.read', 'repository.write', 'repository.admin'] | |
673 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
673 | repo_list_acl = RepoList(repo_list, perm_set=_perms) | |
674 | repo_data = RepoModel().get_repos_as_dict( |
|
674 | repo_data = RepoModel().get_repos_as_dict( | |
675 | repo_list=repo_list_acl, admin=False) |
|
675 | repo_list=repo_list_acl, admin=False) | |
676 |
|
676 | |||
677 | return repo_data, repo_group_data |
|
677 | return repo_data, repo_group_data | |
678 |
|
678 | |||
679 | @LoginRequired() |
|
679 | @LoginRequired() | |
680 | @view_config( |
|
680 | @view_config( | |
681 | route_name='home', request_method='GET', |
|
681 | route_name='home', request_method='GET', | |
682 | renderer='rhodecode:templates/index.mako') |
|
682 | renderer='rhodecode:templates/index.mako') | |
683 | def main_page(self): |
|
683 | def main_page(self): | |
684 | c = self.load_default_context() |
|
684 | c = self.load_default_context() | |
685 | c.repo_group = None |
|
685 | c.repo_group = None | |
686 |
|
686 | |||
687 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
687 | repo_data, repo_group_data = self._get_groups_and_repos() | |
688 | # json used to render the grids |
|
688 | # json used to render the grids | |
689 | c.repos_data = json.dumps(repo_data) |
|
689 | c.repos_data = json.dumps(repo_data) | |
690 | c.repo_groups_data = json.dumps(repo_group_data) |
|
690 | c.repo_groups_data = json.dumps(repo_group_data) | |
691 |
|
691 | |||
692 | return self._get_template_context(c) |
|
692 | return self._get_template_context(c) | |
693 |
|
693 | |||
694 | @LoginRequired() |
|
694 | @LoginRequired() | |
695 | @HasRepoGroupPermissionAnyDecorator( |
|
695 | @HasRepoGroupPermissionAnyDecorator( | |
696 | 'group.read', 'group.write', 'group.admin') |
|
696 | 'group.read', 'group.write', 'group.admin') | |
697 | @view_config( |
|
697 | @view_config( | |
698 | route_name='repo_group_home', request_method='GET', |
|
698 | route_name='repo_group_home', request_method='GET', | |
699 | renderer='rhodecode:templates/index_repo_group.mako') |
|
699 | renderer='rhodecode:templates/index_repo_group.mako') | |
700 | @view_config( |
|
700 | @view_config( | |
701 | route_name='repo_group_home_slash', request_method='GET', |
|
701 | route_name='repo_group_home_slash', request_method='GET', | |
702 | renderer='rhodecode:templates/index_repo_group.mako') |
|
702 | renderer='rhodecode:templates/index_repo_group.mako') | |
703 | def repo_group_main_page(self): |
|
703 | def repo_group_main_page(self): | |
704 | c = self.load_default_context() |
|
704 | c = self.load_default_context() | |
705 | c.repo_group = self.request.db_repo_group |
|
705 | c.repo_group = self.request.db_repo_group | |
706 | repo_data, repo_group_data = self._get_groups_and_repos( |
|
706 | repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id) | |
707 | c.repo_group.group_id) |
|
707 | ||
|
708 | c.repo_group.update_commit_cache() | |||
708 |
|
709 | |||
709 | # json used to render the grids |
|
710 | # json used to render the grids | |
710 | c.repos_data = json.dumps(repo_data) |
|
711 | c.repos_data = json.dumps(repo_data) | |
711 | c.repo_groups_data = json.dumps(repo_group_data) |
|
712 | c.repo_groups_data = json.dumps(repo_group_data) | |
712 |
|
713 | |||
713 | return self._get_template_context(c) |
|
714 | return self._get_template_context(c) | |
714 |
|
715 | |||
715 | @LoginRequired() |
|
716 | @LoginRequired() | |
716 | @CSRFRequired() |
|
717 | @CSRFRequired() | |
717 | @view_config( |
|
718 | @view_config( | |
718 | route_name='markup_preview', request_method='POST', |
|
719 | route_name='markup_preview', request_method='POST', | |
719 | renderer='string', xhr=True) |
|
720 | renderer='string', xhr=True) | |
720 | def markup_preview(self): |
|
721 | def markup_preview(self): | |
721 | # Technically a CSRF token is not needed as no state changes with this |
|
722 | # Technically a CSRF token is not needed as no state changes with this | |
722 | # call. However, since this is a POST, it is better to have it so automated |

723 | # call. However, since this is a POST, it is better to have it so automated | 
723 | # tools don't flag it as a potential CSRF. |

724 | # tools don't flag it as a potential CSRF. | 
724 | # POST is required because the payload could be bigger than the maximum |

725 | # POST is required because the payload could be bigger than the maximum | 
725 | # allowed by GET. |

726 | # allowed by GET. | 
726 |
|
727 | |||
727 | text = self.request.POST.get('text') |
|
728 | text = self.request.POST.get('text') | |
728 | renderer = self.request.POST.get('renderer') or 'rst' |
|
729 | renderer = self.request.POST.get('renderer') or 'rst' | |
729 | if text: |
|
730 | if text: | |
730 | return h.render(text, renderer=renderer, mentions=True) |
|
731 | return h.render(text, renderer=renderer, mentions=True) | |
731 | return '' |
|
732 | return '' | |
732 |
|
733 | |||
733 | @LoginRequired() |
|
734 | @LoginRequired() | |
734 | @CSRFRequired() |
|
735 | @CSRFRequired() | |
735 | @view_config( |
|
736 | @view_config( | |
736 | route_name='store_user_session_value', request_method='POST', |
|
737 | route_name='store_user_session_value', request_method='POST', | |
737 | renderer='string', xhr=True) |
|
738 | renderer='string', xhr=True) | |
738 | def store_user_session_attr(self): |
|
739 | def store_user_session_attr(self): | |
739 | key = self.request.POST.get('key') |
|
740 | key = self.request.POST.get('key') | |
740 | val = self.request.POST.get('val') |
|
741 | val = self.request.POST.get('val') | |
741 |
|
742 | |||
742 | existing_value = self.request.session.get(key) |
|
743 | existing_value = self.request.session.get(key) | |
743 | if existing_value != val: |
|
744 | if existing_value != val: | |
744 | self.request.session[key] = val |
|
745 | self.request.session[key] = val | |
745 |
|
746 | |||
746 | return 'stored:{}:{}'.format(key, val) |
|
747 | return 'stored:{}:{}'.format(key, val) |
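For orientation, here is a rough client-side sketch of the two XHR endpoints defined above. It is not part of the changeset: the host, URL paths, cookie name and CSRF field name are placeholder assumptions; only the POST parameters (text/renderer and key/val) come from the views themselves.

import requests  # illustrative sketch only

BASE = 'https://rhodecode.example.com'            # placeholder host
session = {'rc_session': '<session-cookie>'}      # placeholder cookie name/value
xhr = {'X-Requested-With': 'XMLHttpRequest'}

# Preview a chunk of RST text (POST is used so large payloads are allowed).
requests.post(BASE + '/_markup_preview',          # placeholder route path
              data={'csrf_token': '<token>', 'text': 'Hello *world*', 'renderer': 'rst'},
              cookies=session, headers=xhr)

# Persist a per-user UI preference in the server-side session.
requests.post(BASE + '/_store_user_session_value',  # placeholder route path
              data={'csrf_token': '<token>', 'key': 'diffmode', 'val': 'sideside'},
              cookies=session, headers=xhr)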
@@ -1,645 +1,642 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Database creation and setup module for RhodeCode Enterprise. Used for creating |

22 | Database creation and setup module for RhodeCode Enterprise. Used for creating | 
23 | the database as well as for migration operations |

23 | the database as well as for migration operations | 
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import sys |
|
27 | import sys | |
28 | import time |
|
28 | import time | |
29 | import uuid |
|
29 | import uuid | |
30 | import logging |
|
30 | import logging | |
31 | import getpass |
|
31 | import getpass | |
32 | from os.path import dirname as dn, join as jn |
|
32 | from os.path import dirname as dn, join as jn | |
33 |
|
33 | |||
34 | from sqlalchemy.engine import create_engine |
|
34 | from sqlalchemy.engine import create_engine | |
35 |
|
35 | |||
36 | from rhodecode import __dbversion__ |
|
36 | from rhodecode import __dbversion__ | |
37 | from rhodecode.model import init_model |
|
37 | from rhodecode.model import init_model | |
38 | from rhodecode.model.user import UserModel |
|
38 | from rhodecode.model.user import UserModel | |
39 | from rhodecode.model.db import ( |
|
39 | from rhodecode.model.db import ( | |
40 | User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm, |
|
40 | User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm, | |
41 | DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository) |
|
41 | DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository) | |
42 | from rhodecode.model.meta import Session, Base |
|
42 | from rhodecode.model.meta import Session, Base | |
43 | from rhodecode.model.permission import PermissionModel |
|
43 | from rhodecode.model.permission import PermissionModel | |
44 | from rhodecode.model.repo import RepoModel |
|
44 | from rhodecode.model.repo import RepoModel | |
45 | from rhodecode.model.repo_group import RepoGroupModel |
|
45 | from rhodecode.model.repo_group import RepoGroupModel | |
46 | from rhodecode.model.settings import SettingsModel |
|
46 | from rhodecode.model.settings import SettingsModel | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | log = logging.getLogger(__name__) |
|
49 | log = logging.getLogger(__name__) | |
50 |
|
50 | |||
51 |
|
51 | |||
52 | def notify(msg): |
|
52 | def notify(msg): | |
53 | """ |
|
53 | """ | |
54 | Notification helper for migration messages |

54 | Notification helper for migration messages | 
55 | """ |
|
55 | """ | |
56 | ml = len(msg) + (4 * 2) |
|
56 | ml = len(msg) + (4 * 2) | |
57 | print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper()) |
|
57 | print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper()) | |
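# As a quick worked example (the message text is arbitrary): notify('upgrade ok')
# pads the border to len(msg) + 8 asterisks and upper-cases the whole block:
#
# ******************
# *** UPGRADE OK ***
# ******************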
58 |
|
58 | |||
59 |
|
59 | |||
60 | class DbManage(object): |
|
60 | class DbManage(object): | |
61 |
|
61 | |||
62 | def __init__(self, log_sql, dbconf, root, tests=False, |
|
62 | def __init__(self, log_sql, dbconf, root, tests=False, | |
63 | SESSION=None, cli_args=None): |
|
63 | SESSION=None, cli_args=None): | |
64 | self.dbname = dbconf.split('/')[-1] |
|
64 | self.dbname = dbconf.split('/')[-1] | |
65 | self.tests = tests |
|
65 | self.tests = tests | |
66 | self.root = root |
|
66 | self.root = root | |
67 | self.dburi = dbconf |
|
67 | self.dburi = dbconf | |
68 | self.log_sql = log_sql |
|
68 | self.log_sql = log_sql | |
69 | self.db_exists = False |
|
69 | self.db_exists = False | |
70 | self.cli_args = cli_args or {} |
|
70 | self.cli_args = cli_args or {} | |
71 | self.init_db(SESSION=SESSION) |
|
71 | self.init_db(SESSION=SESSION) | |
72 | self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask')) |
|
72 | self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask')) | |
73 |
|
73 | |||
74 | def get_ask_ok_func(self, param): |
|
74 | def get_ask_ok_func(self, param): | |
75 | if param not in [None]: |
|
75 | if param not in [None]: | |
76 | # return a lambda that ignores its arguments and always returns param |

76 | # return a lambda that ignores its arguments and always returns param | 
77 | return lambda *args, **kwargs: param |
|
77 | return lambda *args, **kwargs: param | |
78 | else: |
|
78 | else: | |
79 | from rhodecode.lib.utils import ask_ok |
|
79 | from rhodecode.lib.utils import ask_ok | |
80 | return ask_ok |
|
80 | return ask_ok | |
81 |
|
81 | |||
82 | def init_db(self, SESSION=None): |
|
82 | def init_db(self, SESSION=None): | |
83 | if SESSION: |
|
83 | if SESSION: | |
84 | self.sa = SESSION |
|
84 | self.sa = SESSION | |
85 | else: |
|
85 | else: | |
86 | # init new sessions |
|
86 | # init new sessions | |
87 | engine = create_engine(self.dburi, echo=self.log_sql) |
|
87 | engine = create_engine(self.dburi, echo=self.log_sql) | |
88 | init_model(engine) |
|
88 | init_model(engine) | |
89 | self.sa = Session() |
|
89 | self.sa = Session() | |
90 |
|
90 | |||
91 | def create_tables(self, override=False): |
|
91 | def create_tables(self, override=False): | |
92 | """ |
|
92 | """ | |
93 | Create an auth database |

93 | Create an auth database | 
94 | """ |
|
94 | """ | |
95 |
|
95 | |||
96 | log.info("Existing database with the same name is going to be destroyed.") |
|
96 | log.info("Existing database with the same name is going to be destroyed.") | |
97 | log.info("Setup command will run DROP ALL command on that database.") |
|
97 | log.info("Setup command will run DROP ALL command on that database.") | |
98 | if self.tests: |
|
98 | if self.tests: | |
99 | destroy = True |
|
99 | destroy = True | |
100 | else: |
|
100 | else: | |
101 | destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]') |
|
101 | destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]') | |
102 | if not destroy: |
|
102 | if not destroy: | |
103 | log.info('Nothing done.') |
|
103 | log.info('Nothing done.') | |
104 | sys.exit(0) |
|
104 | sys.exit(0) | |
105 | if destroy: |
|
105 | if destroy: | |
106 | Base.metadata.drop_all() |
|
106 | Base.metadata.drop_all() | |
107 |
|
107 | |||
108 | checkfirst = not override |
|
108 | checkfirst = not override | |
109 | Base.metadata.create_all(checkfirst=checkfirst) |
|
109 | Base.metadata.create_all(checkfirst=checkfirst) | |
110 | log.info('Created tables for %s', self.dbname) |
|
110 | log.info('Created tables for %s', self.dbname) | |
111 |
|
111 | |||
112 | def set_db_version(self): |
|
112 | def set_db_version(self): | |
113 | ver = DbMigrateVersion() |
|
113 | ver = DbMigrateVersion() | |
114 | ver.version = __dbversion__ |
|
114 | ver.version = __dbversion__ | |
115 | ver.repository_id = 'rhodecode_db_migrations' |
|
115 | ver.repository_id = 'rhodecode_db_migrations' | |
116 | ver.repository_path = 'versions' |
|
116 | ver.repository_path = 'versions' | |
117 | self.sa.add(ver) |
|
117 | self.sa.add(ver) | |
118 | log.info('db version set to: %s', __dbversion__) |
|
118 | log.info('db version set to: %s', __dbversion__) | |
119 |
|
119 | |||
120 | def run_pre_migration_tasks(self): |
|
120 | def run_pre_migration_tasks(self): | |
121 | """ |
|
121 | """ | |
122 | Run various tasks before actually doing migrations |
|
122 | Run various tasks before actually doing migrations | |
123 | """ |
|
123 | """ | |
124 | # delete cache keys on each upgrade |
|
124 | # delete cache keys on each upgrade | |
125 | total = CacheKey.query().count() |
|
125 | total = CacheKey.query().count() | |
126 | log.info("Deleting (%s) cache keys now...", total) |
|
126 | log.info("Deleting (%s) cache keys now...", total) | |
127 | CacheKey.delete_all_cache() |
|
127 | CacheKey.delete_all_cache() | |
128 |
|
128 | |||
129 | def upgrade(self, version=None): |
|
129 | def upgrade(self, version=None): | |
130 | """ |
|
130 | """ | |
131 | Upgrades the given database schema to the given revision, following |

131 | Upgrades the given database schema to the given revision, following | 
132 | all steps needed to perform the upgrade |

132 | all steps needed to perform the upgrade | 
133 |
|
133 | |||
134 | """ |
|
134 | """ | |
135 |
|
135 | |||
136 | from rhodecode.lib.dbmigrate.migrate.versioning import api |
|
136 | from rhodecode.lib.dbmigrate.migrate.versioning import api | |
137 | from rhodecode.lib.dbmigrate.migrate.exceptions import \ |
|
137 | from rhodecode.lib.dbmigrate.migrate.exceptions import \ | |
138 | DatabaseNotControlledError |
|
138 | DatabaseNotControlledError | |
139 |
|
139 | |||
140 | if 'sqlite' in self.dburi: |
|
140 | if 'sqlite' in self.dburi: | |
141 | print( |
|
141 | print( | |
142 | '********************** WARNING **********************\n' |
|
142 | '********************** WARNING **********************\n' | |
143 | 'Make sure your version of sqlite is at least 3.7.X. \n' |
|
143 | 'Make sure your version of sqlite is at least 3.7.X. \n' | |
144 | 'Earlier versions are known to fail on some migrations\n' |
|
144 | 'Earlier versions are known to fail on some migrations\n' | |
145 | '*****************************************************\n') |
|
145 | '*****************************************************\n') | |
146 |
|
146 | |||
147 | upgrade = self.ask_ok( |
|
147 | upgrade = self.ask_ok( | |
148 | 'You are about to perform a database upgrade. Make ' |
|
148 | 'You are about to perform a database upgrade. Make ' | |
149 | 'sure you have backed up your database. ' |
|
149 | 'sure you have backed up your database. ' | |
150 | 'Continue ? [y/n]') |
|
150 | 'Continue ? [y/n]') | |
151 | if not upgrade: |
|
151 | if not upgrade: | |
152 | log.info('No upgrade performed') |
|
152 | log.info('No upgrade performed') | |
153 | sys.exit(0) |
|
153 | sys.exit(0) | |
154 |
|
154 | |||
155 | repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))), |
|
155 | repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))), | |
156 | 'rhodecode/lib/dbmigrate') |
|
156 | 'rhodecode/lib/dbmigrate') | |
157 | db_uri = self.dburi |
|
157 | db_uri = self.dburi | |
158 |
|
158 | |||
159 | if version: |
|
159 | if version: | |
160 | DbMigrateVersion.set_version(version) |
|
160 | DbMigrateVersion.set_version(version) | |
161 |
|
161 | |||
162 | try: |
|
162 | try: | |
163 | curr_version = api.db_version(db_uri, repository_path) |
|
163 | curr_version = api.db_version(db_uri, repository_path) | |
164 | msg = ('Found current database db_uri under version ' |
|
164 | msg = ('Found current database db_uri under version ' | |
165 | 'control with version {}'.format(curr_version)) |
|
165 | 'control with version {}'.format(curr_version)) | |
166 |
|
166 | |||
167 | except (RuntimeError, DatabaseNotControlledError): |
|
167 | except (RuntimeError, DatabaseNotControlledError): | |
168 | curr_version = 1 |
|
168 | curr_version = 1 | |
169 | msg = ('Current database is not under version control. Setting ' |
|
169 | msg = ('Current database is not under version control. Setting ' | |
170 | 'as version %s' % curr_version) |
|
170 | 'as version %s' % curr_version) | |
171 | api.version_control(db_uri, repository_path, curr_version) |
|
171 | api.version_control(db_uri, repository_path, curr_version) | |
172 |
|
172 | |||
173 | notify(msg) |
|
173 | notify(msg) | |
174 |
|
174 | |||
175 | self.run_pre_migration_tasks() |
|
175 | self.run_pre_migration_tasks() | |
176 |
|
176 | |||
177 | if curr_version == __dbversion__: |
|
177 | if curr_version == __dbversion__: | |
178 | log.info('This database is already at the newest version') |
|
178 | log.info('This database is already at the newest version') | |
179 | sys.exit(0) |
|
179 | sys.exit(0) | |
180 |
|
180 | |||
181 | upgrade_steps = range(curr_version + 1, __dbversion__ + 1) |
|
181 | upgrade_steps = range(curr_version + 1, __dbversion__ + 1) | |
182 | notify('attempting to upgrade database from ' |
|
182 | notify('attempting to upgrade database from ' | |
183 | 'version %s to version %s' % (curr_version, __dbversion__)) |
|
183 | 'version %s to version %s' % (curr_version, __dbversion__)) | |
184 |
|
184 | |||
185 | # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE |
|
185 | # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE | |
186 | _step = None |
|
186 | _step = None | |
187 | for step in upgrade_steps: |
|
187 | for step in upgrade_steps: | |
188 | notify('performing upgrade step %s' % step) |
|
188 | notify('performing upgrade step %s' % step) | |
189 | time.sleep(0.5) |
|
189 | time.sleep(0.5) | |
190 |
|
190 | |||
191 | api.upgrade(db_uri, repository_path, step) |
|
191 | api.upgrade(db_uri, repository_path, step) | |
192 | self.sa.rollback() |
|
192 | self.sa.rollback() | |
193 | notify('schema upgrade for step %s completed' % (step,)) |
|
193 | notify('schema upgrade for step %s completed' % (step,)) | |
194 |
|
194 | |||
195 | _step = step |
|
195 | _step = step | |
196 |
|
196 | |||
197 | notify('upgrade to version %s successful' % _step) |
|
197 | notify('upgrade to version %s successful' % _step) | |
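# For example (version numbers are illustrative): if the database reports
# version 95 and __dbversion__ is 98, the loop above performs three
# incremental migrations:
#   upgrade_steps = range(95 + 1, 98 + 1)  ->  [96, 97, 98]
# each step runs api.upgrade(db_uri, repository_path, step) and then rolls
# back the open SQLAlchemy session before the next step.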
198 |
|
198 | |||
199 | def fix_repo_paths(self): |
|
199 | def fix_repo_paths(self): | |
200 | """ |
|
200 | """ | |
201 | Fixes an old RhodeCode version path into a new one without a '*' |

201 | Fixes an old RhodeCode version path into a new one without a '*' | 
202 | """ |
|
202 | """ | |
203 |
|
203 | |||
204 | paths = self.sa.query(RhodeCodeUi)\ |
|
204 | paths = self.sa.query(RhodeCodeUi)\ | |
205 | .filter(RhodeCodeUi.ui_key == '/')\ |
|
205 | .filter(RhodeCodeUi.ui_key == '/')\ | |
206 | .scalar() |
|
206 | .scalar() | |
207 |
|
207 | |||
208 | paths.ui_value = paths.ui_value.replace('*', '') |
|
208 | paths.ui_value = paths.ui_value.replace('*', '') | |
209 |
|
209 | |||
210 | try: |
|
210 | try: | |
211 | self.sa.add(paths) |
|
211 | self.sa.add(paths) | |
212 | self.sa.commit() |
|
212 | self.sa.commit() | |
213 | except Exception: |
|
213 | except Exception: | |
214 | self.sa.rollback() |
|
214 | self.sa.rollback() | |
215 | raise |
|
215 | raise | |
216 |
|
216 | |||
217 | def fix_default_user(self): |
|
217 | def fix_default_user(self): | |
218 | """ |
|
218 | """ | |
219 | Fixes an old default user with some 'nicer' default values, |
|
219 | Fixes an old default user with some 'nicer' default values, | |
220 | used mostly for anonymous access |
|
220 | used mostly for anonymous access | |
221 | """ |
|
221 | """ | |
222 | def_user = self.sa.query(User)\ |
|
222 | def_user = self.sa.query(User)\ | |
223 | .filter(User.username == User.DEFAULT_USER)\ |
|
223 | .filter(User.username == User.DEFAULT_USER)\ | |
224 | .one() |
|
224 | .one() | |
225 |
|
225 | |||
226 | def_user.name = 'Anonymous' |
|
226 | def_user.name = 'Anonymous' | |
227 | def_user.lastname = 'User' |
|
227 | def_user.lastname = 'User' | |
228 | def_user.email = User.DEFAULT_USER_EMAIL |
|
228 | def_user.email = User.DEFAULT_USER_EMAIL | |
229 |
|
229 | |||
230 | try: |
|
230 | try: | |
231 | self.sa.add(def_user) |
|
231 | self.sa.add(def_user) | |
232 | self.sa.commit() |
|
232 | self.sa.commit() | |
233 | except Exception: |
|
233 | except Exception: | |
234 | self.sa.rollback() |
|
234 | self.sa.rollback() | |
235 | raise |
|
235 | raise | |
236 |
|
236 | |||
237 | def fix_settings(self): |
|
237 | def fix_settings(self): | |
238 | """ |
|
238 | """ | |
239 | Fixes rhodecode settings and adds ga_code key for google analytics |
|
239 | Fixes rhodecode settings and adds ga_code key for google analytics | |
240 | """ |
|
240 | """ | |
241 |
|
241 | |||
242 | hgsettings3 = RhodeCodeSetting('ga_code', '') |
|
242 | hgsettings3 = RhodeCodeSetting('ga_code', '') | |
243 |
|
243 | |||
244 | try: |
|
244 | try: | |
245 | self.sa.add(hgsettings3) |
|
245 | self.sa.add(hgsettings3) | |
246 | self.sa.commit() |
|
246 | self.sa.commit() | |
247 | except Exception: |
|
247 | except Exception: | |
248 | self.sa.rollback() |
|
248 | self.sa.rollback() | |
249 | raise |
|
249 | raise | |
250 |
|
250 | |||
251 | def create_admin_and_prompt(self): |
|
251 | def create_admin_and_prompt(self): | |
252 |
|
252 | |||
253 | # defaults |
|
253 | # defaults | |
254 | defaults = self.cli_args |
|
254 | defaults = self.cli_args | |
255 | username = defaults.get('username') |
|
255 | username = defaults.get('username') | |
256 | password = defaults.get('password') |
|
256 | password = defaults.get('password') | |
257 | email = defaults.get('email') |
|
257 | email = defaults.get('email') | |
258 |
|
258 | |||
259 | if username is None: |
|
259 | if username is None: | |
260 | username = raw_input('Specify admin username:') |
|
260 | username = raw_input('Specify admin username:') | |
261 | if password is None: |
|
261 | if password is None: | |
262 | password = self._get_admin_password() |
|
262 | password = self._get_admin_password() | |
263 | if not password: |
|
263 | if not password: | |
264 | # second try |
|
264 | # second try | |
265 | password = self._get_admin_password() |
|
265 | password = self._get_admin_password() | |
266 | if not password: |
|
266 | if not password: | |
267 | sys.exit() |
|
267 | sys.exit() | |
268 | if email is None: |
|
268 | if email is None: | |
269 | email = raw_input('Specify admin email:') |
|
269 | email = raw_input('Specify admin email:') | |
270 | api_key = self.cli_args.get('api_key') |
|
270 | api_key = self.cli_args.get('api_key') | |
271 | self.create_user(username, password, email, True, |
|
271 | self.create_user(username, password, email, True, | |
272 | strict_creation_check=False, |
|
272 | strict_creation_check=False, | |
273 | api_key=api_key) |
|
273 | api_key=api_key) | |
274 |
|
274 | |||
275 | def _get_admin_password(self): |
|
275 | def _get_admin_password(self): | |
276 | password = getpass.getpass('Specify admin password ' |
|
276 | password = getpass.getpass('Specify admin password ' | |
277 | '(min 6 chars):') |
|
277 | '(min 6 chars):') | |
278 | confirm = getpass.getpass('Confirm password:') |
|
278 | confirm = getpass.getpass('Confirm password:') | |
279 |
|
279 | |||
280 | if password != confirm: |
|
280 | if password != confirm: | |
281 | log.error('passwords do not match') |

281 | log.error('passwords do not match') | 
282 | return False |
|
282 | return False | |
283 | if len(password) < 6: |
|
283 | if len(password) < 6: | |
284 | log.error('password is too short - use at least 6 characters') |
|
284 | log.error('password is too short - use at least 6 characters') | |
285 | return False |
|
285 | return False | |
286 |
|
286 | |||
287 | return password |
|
287 | return password | |
288 |
|
288 | |||
289 | def create_test_admin_and_users(self): |
|
289 | def create_test_admin_and_users(self): | |
290 | log.info('creating admin and regular test users') |
|
290 | log.info('creating admin and regular test users') | |
291 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \ |
|
291 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \ | |
292 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ |
|
292 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ | |
293 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ |
|
293 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ | |
294 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ |
|
294 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ | |
295 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL |
|
295 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL | |
296 |
|
296 | |||
297 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, |
|
297 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, | |
298 | TEST_USER_ADMIN_EMAIL, True, api_key=True) |
|
298 | TEST_USER_ADMIN_EMAIL, True, api_key=True) | |
299 |
|
299 | |||
300 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, |
|
300 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, | |
301 | TEST_USER_REGULAR_EMAIL, False, api_key=True) |
|
301 | TEST_USER_REGULAR_EMAIL, False, api_key=True) | |
302 |
|
302 | |||
303 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, |
|
303 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, | |
304 | TEST_USER_REGULAR2_EMAIL, False, api_key=True) |
|
304 | TEST_USER_REGULAR2_EMAIL, False, api_key=True) | |
305 |
|
305 | |||
306 | def create_ui_settings(self, repo_store_path): |
|
306 | def create_ui_settings(self, repo_store_path): | |
307 | """ |
|
307 | """ | |
308 | Creates ui settings, fills out hooks |
|
308 | Creates ui settings, fills out hooks | |
309 | and disables dotencode |
|
309 | and disables dotencode | |
310 | """ |
|
310 | """ | |
311 | settings_model = SettingsModel(sa=self.sa) |
|
311 | settings_model = SettingsModel(sa=self.sa) | |
312 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
312 | from rhodecode.lib.vcs.backends.hg import largefiles_store | |
313 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
313 | from rhodecode.lib.vcs.backends.git import lfs_store | |
314 |
|
314 | |||
315 | # Build HOOKS |
|
315 | # Build HOOKS | |
316 | hooks = [ |
|
316 | hooks = [ | |
317 | (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'), |
|
317 | (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'), | |
318 |
|
318 | |||
319 | # HG |
|
319 | # HG | |
320 | (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'), |
|
320 | (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'), | |
321 | (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'), |
|
321 | (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'), | |
322 | (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'), |
|
322 | (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'), | |
323 | (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'), |
|
323 | (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'), | |
324 | (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'), |
|
324 | (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'), | |
325 | (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'), |
|
325 | (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'), | |
326 |
|
326 | |||
327 | ] |
|
327 | ] | |
328 |
|
328 | |||
329 | for key, value in hooks: |
|
329 | for key, value in hooks: | |
330 | hook_obj = settings_model.get_ui_by_key(key) |
|
330 | hook_obj = settings_model.get_ui_by_key(key) | |
331 | hooks2 = hook_obj if hook_obj else RhodeCodeUi() |
|
331 | hooks2 = hook_obj if hook_obj else RhodeCodeUi() | |
332 | hooks2.ui_section = 'hooks' |
|
332 | hooks2.ui_section = 'hooks' | |
333 | hooks2.ui_key = key |
|
333 | hooks2.ui_key = key | |
334 | hooks2.ui_value = value |
|
334 | hooks2.ui_value = value | |
335 | self.sa.add(hooks2) |
|
335 | self.sa.add(hooks2) | |
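# Note: the loop above is effectively a get-or-create -- an existing 'hooks'
# ui row returned by get_ui_by_key() is updated in place, otherwise a fresh
# RhodeCodeUi row is added, so re-running setup does not duplicate hook entries.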
336 |
|
336 | |||
337 | # enable largefiles |
|
337 | # enable largefiles | |
338 | largefiles = RhodeCodeUi() |
|
338 | largefiles = RhodeCodeUi() | |
339 | largefiles.ui_section = 'extensions' |
|
339 | largefiles.ui_section = 'extensions' | |
340 | largefiles.ui_key = 'largefiles' |
|
340 | largefiles.ui_key = 'largefiles' | |
341 | largefiles.ui_value = '' |
|
341 | largefiles.ui_value = '' | |
342 | self.sa.add(largefiles) |
|
342 | self.sa.add(largefiles) | |
343 |
|
343 | |||
344 | # set default largefiles cache dir, defaults to |
|
344 | # set default largefiles cache dir, defaults to | |
345 | # /repo_store_location/.cache/largefiles |
|
345 | # /repo_store_location/.cache/largefiles | |
346 | largefiles = RhodeCodeUi() |
|
346 | largefiles = RhodeCodeUi() | |
347 | largefiles.ui_section = 'largefiles' |
|
347 | largefiles.ui_section = 'largefiles' | |
348 | largefiles.ui_key = 'usercache' |
|
348 | largefiles.ui_key = 'usercache' | |
349 | largefiles.ui_value = largefiles_store(repo_store_path) |
|
349 | largefiles.ui_value = largefiles_store(repo_store_path) | |
350 |
|
350 | |||
351 | self.sa.add(largefiles) |
|
351 | self.sa.add(largefiles) | |
352 |
|
352 | |||
353 | # set default lfs cache dir, defaults to |
|
353 | # set default lfs cache dir, defaults to | |
354 | # /repo_store_location/.cache/lfs_store |
|
354 | # /repo_store_location/.cache/lfs_store | |
355 | lfsstore = RhodeCodeUi() |
|
355 | lfsstore = RhodeCodeUi() | |
356 | lfsstore.ui_section = 'vcs_git_lfs' |
|
356 | lfsstore.ui_section = 'vcs_git_lfs' | |
357 | lfsstore.ui_key = 'store_location' |
|
357 | lfsstore.ui_key = 'store_location' | |
358 | lfsstore.ui_value = lfs_store(repo_store_path) |
|
358 | lfsstore.ui_value = lfs_store(repo_store_path) | |
359 |
|
359 | |||
360 | self.sa.add(lfsstore) |
|
360 | self.sa.add(lfsstore) | |
361 |
|
361 | |||
362 | # register hgsubversion extension, disabled by default |

362 | # register hgsubversion extension, disabled by default | 
363 | hgsubversion = RhodeCodeUi() |
|
363 | hgsubversion = RhodeCodeUi() | |
364 | hgsubversion.ui_section = 'extensions' |
|
364 | hgsubversion.ui_section = 'extensions' | |
365 | hgsubversion.ui_key = 'hgsubversion' |
|
365 | hgsubversion.ui_key = 'hgsubversion' | |
366 | hgsubversion.ui_value = '' |
|
366 | hgsubversion.ui_value = '' | |
367 | hgsubversion.ui_active = False |
|
367 | hgsubversion.ui_active = False | |
368 | self.sa.add(hgsubversion) |
|
368 | self.sa.add(hgsubversion) | |
369 |
|
369 | |||
370 | # register hgevolve extension, disabled by default |

370 | # register hgevolve extension, disabled by default | 
371 | hgevolve = RhodeCodeUi() |
|
371 | hgevolve = RhodeCodeUi() | |
372 | hgevolve.ui_section = 'extensions' |
|
372 | hgevolve.ui_section = 'extensions' | |
373 | hgevolve.ui_key = 'evolve' |
|
373 | hgevolve.ui_key = 'evolve' | |
374 | hgevolve.ui_value = '' |
|
374 | hgevolve.ui_value = '' | |
375 | hgevolve.ui_active = False |
|
375 | hgevolve.ui_active = False | |
376 | self.sa.add(hgevolve) |
|
376 | self.sa.add(hgevolve) | |
377 |
|
377 | |||
378 | hgevolve = RhodeCodeUi() |
|
378 | hgevolve = RhodeCodeUi() | |
379 | hgevolve.ui_section = 'experimental' |
|
379 | hgevolve.ui_section = 'experimental' | |
380 | hgevolve.ui_key = 'evolution' |
|
380 | hgevolve.ui_key = 'evolution' | |
381 | hgevolve.ui_value = '' |
|
381 | hgevolve.ui_value = '' | |
382 | hgevolve.ui_active = False |
|
382 | hgevolve.ui_active = False | |
383 | self.sa.add(hgevolve) |
|
383 | self.sa.add(hgevolve) | |
384 |
|
384 | |||
385 | hgevolve = RhodeCodeUi() |
|
385 | hgevolve = RhodeCodeUi() | |
386 | hgevolve.ui_section = 'experimental' |
|
386 | hgevolve.ui_section = 'experimental' | |
387 | hgevolve.ui_key = 'evolution.exchange' |
|
387 | hgevolve.ui_key = 'evolution.exchange' | |
388 | hgevolve.ui_value = '' |
|
388 | hgevolve.ui_value = '' | |
389 | hgevolve.ui_active = False |
|
389 | hgevolve.ui_active = False | |
390 | self.sa.add(hgevolve) |
|
390 | self.sa.add(hgevolve) | |
391 |
|
391 | |||
392 | hgevolve = RhodeCodeUi() |
|
392 | hgevolve = RhodeCodeUi() | |
393 | hgevolve.ui_section = 'extensions' |
|
393 | hgevolve.ui_section = 'extensions' | |
394 | hgevolve.ui_key = 'topic' |
|
394 | hgevolve.ui_key = 'topic' | |
395 | hgevolve.ui_value = '' |
|
395 | hgevolve.ui_value = '' | |
396 | hgevolve.ui_active = False |
|
396 | hgevolve.ui_active = False | |
397 | self.sa.add(hgevolve) |
|
397 | self.sa.add(hgevolve) | |
398 |
|
398 | |||
399 | # register hggit extension, disabled by default |

399 | # register hggit extension, disabled by default | 
400 | hggit = RhodeCodeUi() |
|
400 | hggit = RhodeCodeUi() | |
401 | hggit.ui_section = 'extensions' |
|
401 | hggit.ui_section = 'extensions' | |
402 | hggit.ui_key = 'hggit' |
|
402 | hggit.ui_key = 'hggit' | |
403 | hggit.ui_value = '' |
|
403 | hggit.ui_value = '' | |
404 | hggit.ui_active = False |
|
404 | hggit.ui_active = False | |
405 | self.sa.add(hggit) |
|
405 | self.sa.add(hggit) | |
406 |
|
406 | |||
407 | # set svn branch defaults |
|
407 | # set svn branch defaults | |
408 | branches = ["/branches/*", "/trunk"] |
|
408 | branches = ["/branches/*", "/trunk"] | |
409 | tags = ["/tags/*"] |
|
409 | tags = ["/tags/*"] | |
410 |
|
410 | |||
411 | for branch in branches: |
|
411 | for branch in branches: | |
412 | settings_model.create_ui_section_value( |
|
412 | settings_model.create_ui_section_value( | |
413 | RhodeCodeUi.SVN_BRANCH_ID, branch) |
|
413 | RhodeCodeUi.SVN_BRANCH_ID, branch) | |
414 |
|
414 | |||
415 | for tag in tags: |
|
415 | for tag in tags: | |
416 | settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag) |
|
416 | settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag) | |
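# Note: these defaults follow the conventional Subversion layout -- paths
# matching /branches/* (plus /trunk) are mapped to branches, and paths under
# /tags/* are mapped to tags when browsing svn repositories.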
417 |
|
417 | |||
418 | def create_auth_plugin_options(self, skip_existing=False): |
|
418 | def create_auth_plugin_options(self, skip_existing=False): | |
419 | """ |
|
419 | """ | |
420 | Create default auth plugin settings, and make it active |
|
420 | Create default auth plugin settings, and make it active | |
421 |
|
421 | |||
422 | :param skip_existing: if True, options that already exist are left untouched |

422 | :param skip_existing: if True, options that already exist are left untouched | 
423 | """ |
|
423 | """ | |
424 |
|
424 | |||
425 | for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'), |
|
425 | for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'), | |
426 | ('auth_rhodecode_enabled', 'True', 'bool')]: |
|
426 | ('auth_rhodecode_enabled', 'True', 'bool')]: | |
427 | if (skip_existing and |
|
427 | if (skip_existing and | |
428 | SettingsModel().get_setting_by_name(k) is not None): |
|
428 | SettingsModel().get_setting_by_name(k) is not None): | |
429 | log.debug('Skipping option %s', k) |
|
429 | log.debug('Skipping option %s', k) | |
430 | continue |
|
430 | continue | |
431 | setting = RhodeCodeSetting(k, v, t) |
|
431 | setting = RhodeCodeSetting(k, v, t) | |
432 | self.sa.add(setting) |
|
432 | self.sa.add(setting) | |
433 |
|
433 | |||
434 | def create_default_options(self, skip_existing=False): |
|
434 | def create_default_options(self, skip_existing=False): | |
435 | """Creates default settings""" |
|
435 | """Creates default settings""" | |
436 |
|
436 | |||
437 | for k, v, t in [ |
|
437 | for k, v, t in [ | |
438 | ('default_repo_enable_locking', False, 'bool'), |
|
438 | ('default_repo_enable_locking', False, 'bool'), | |
439 | ('default_repo_enable_downloads', False, 'bool'), |
|
439 | ('default_repo_enable_downloads', False, 'bool'), | |
440 | ('default_repo_enable_statistics', False, 'bool'), |
|
440 | ('default_repo_enable_statistics', False, 'bool'), | |
441 | ('default_repo_private', False, 'bool'), |
|
441 | ('default_repo_private', False, 'bool'), | |
442 | ('default_repo_type', 'hg', 'unicode')]: |
|
442 | ('default_repo_type', 'hg', 'unicode')]: | |
443 |
|
443 | |||
444 | if (skip_existing and |
|
444 | if (skip_existing and | |
445 | SettingsModel().get_setting_by_name(k) is not None): |
|
445 | SettingsModel().get_setting_by_name(k) is not None): | |
446 | log.debug('Skipping option %s', k) |
|
446 | log.debug('Skipping option %s', k) | |
447 | continue |
|
447 | continue | |
448 | setting = RhodeCodeSetting(k, v, t) |
|
448 | setting = RhodeCodeSetting(k, v, t) | |
449 | self.sa.add(setting) |
|
449 | self.sa.add(setting) | |
450 |
|
450 | |||
451 | def fixup_groups(self): |
|
451 | def fixup_groups(self): | |
452 | def_usr = User.get_default_user() |
|
452 | def_usr = User.get_default_user() | |
453 | for g in RepoGroup.query().all(): |
|
453 | for g in RepoGroup.query().all(): | |
454 | g.group_name = g.get_new_name(g.name) |
|
454 | g.group_name = g.get_new_name(g.name) | |
455 | self.sa.add(g) |
|
455 | self.sa.add(g) | |
456 | # get default perm |
|
456 | # get default perm | |
457 | default = UserRepoGroupToPerm.query()\ |
|
457 | default = UserRepoGroupToPerm.query()\ | |
458 | .filter(UserRepoGroupToPerm.group == g)\ |
|
458 | .filter(UserRepoGroupToPerm.group == g)\ | |
459 | .filter(UserRepoGroupToPerm.user == def_usr)\ |
|
459 | .filter(UserRepoGroupToPerm.user == def_usr)\ | |
460 | .scalar() |
|
460 | .scalar() | |
461 |
|
461 | |||
462 | if default is None: |
|
462 | if default is None: | |
463 | log.debug('missing default permission for group %s adding', g) |
|
463 | log.debug('missing default permission for group %s adding', g) | |
464 | perm_obj = RepoGroupModel()._create_default_perms(g) |
|
464 | perm_obj = RepoGroupModel()._create_default_perms(g) | |
465 | self.sa.add(perm_obj) |
|
465 | self.sa.add(perm_obj) | |
466 |
|
466 | |||
467 | def reset_permissions(self, username): |
|
467 | def reset_permissions(self, username): | |
468 | """ |
|
468 | """ | |
469 | Resets permissions to the default state. Useful when old systems had |

469 | Resets permissions to the default state. Useful when old systems had | 
470 | bad permissions that we must clean up |

470 | bad permissions that we must clean up | 
471 |
|
471 | |||
472 | :param username: name of the user whose permissions should be reset |

472 | :param username: name of the user whose permissions should be reset | 
473 | """ |
|
473 | """ | |
474 | default_user = User.get_by_username(username) |
|
474 | default_user = User.get_by_username(username) | |
475 | if not default_user: |
|
475 | if not default_user: | |
476 | return |
|
476 | return | |
477 |
|
477 | |||
478 | u2p = UserToPerm.query()\ |
|
478 | u2p = UserToPerm.query()\ | |
479 | .filter(UserToPerm.user == default_user).all() |
|
479 | .filter(UserToPerm.user == default_user).all() | |
480 | fixed = False |
|
480 | fixed = False | |
481 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): |
|
481 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): | |
482 | for p in u2p: |
|
482 | for p in u2p: | |
483 | Session().delete(p) |
|
483 | Session().delete(p) | |
484 | fixed = True |
|
484 | fixed = True | |
485 | self.populate_default_permissions() |
|
485 | self.populate_default_permissions() | |
486 | return fixed |
|
486 | return fixed | |
487 |
|
487 | |||
488 | def update_repo_info(self): |

489 | RepoModel.update_repoinfo() |

490 |
491 | def config_prompt(self, test_repo_path='', retries=3): |
|
488 | def config_prompt(self, test_repo_path='', retries=3): | |
492 | defaults = self.cli_args |
|
489 | defaults = self.cli_args | |
493 | _path = defaults.get('repos_location') |
|
490 | _path = defaults.get('repos_location') | |
494 | if retries == 3: |
|
491 | if retries == 3: | |
495 | log.info('Setting up repositories config') |
|
492 | log.info('Setting up repositories config') | |
496 |
|
493 | |||
497 | if _path is not None: |
|
494 | if _path is not None: | |
498 | path = _path |
|
495 | path = _path | |
499 | elif not self.tests and not test_repo_path: |
|
496 | elif not self.tests and not test_repo_path: | |
500 | path = raw_input( |
|
497 | path = raw_input( | |
501 | 'Enter a valid absolute path to store repositories. ' |
|
498 | 'Enter a valid absolute path to store repositories. ' | |
502 | 'All repositories in that path will be added automatically:' |
|
499 | 'All repositories in that path will be added automatically:' | |
503 | ) |
|
500 | ) | |
504 | else: |
|
501 | else: | |
505 | path = test_repo_path |
|
502 | path = test_repo_path | |
506 | path_ok = True |
|
503 | path_ok = True | |
507 |
|
504 | |||
508 | # check proper dir |
|
505 | # check proper dir | |
509 | if not os.path.isdir(path): |
|
506 | if not os.path.isdir(path): | |
510 | path_ok = False |
|
507 | path_ok = False | |
511 | log.error('Given path %s is not a valid directory', path) |
|
508 | log.error('Given path %s is not a valid directory', path) | |
512 |
|
509 | |||
513 | elif not os.path.isabs(path): |
|
510 | elif not os.path.isabs(path): | |
514 | path_ok = False |
|
511 | path_ok = False | |
515 | log.error('Given path %s is not an absolute path', path) |
|
512 | log.error('Given path %s is not an absolute path', path) | |
516 |
|
513 | |||
517 | # check if path is at least readable. |
|
514 | # check if path is at least readable. | |
518 | if not os.access(path, os.R_OK): |
|
515 | if not os.access(path, os.R_OK): | |
519 | path_ok = False |
|
516 | path_ok = False | |
520 | log.error('Given path %s is not readable', path) |
|
517 | log.error('Given path %s is not readable', path) | |
521 |
|
518 | |||
522 | # check write access, warn user about non writeable paths |
|
519 | # check write access, warn user about non writeable paths | |
523 | elif not os.access(path, os.W_OK) and path_ok: |
|
520 | elif not os.access(path, os.W_OK) and path_ok: | |
524 | log.warning('No write permission to given path %s', path) |
|
521 | log.warning('No write permission to given path %s', path) | |
525 |
|
522 | |||
526 | q = ('Given path %s is not writeable, do you want to ' |
|
523 | q = ('Given path %s is not writeable, do you want to ' | |
527 | 'continue with read only mode ? [y/n]' % (path,)) |
|
524 | 'continue with read only mode ? [y/n]' % (path,)) | |
528 | if not self.ask_ok(q): |
|
525 | if not self.ask_ok(q): | |
529 | log.error('Canceled by user') |
|
526 | log.error('Canceled by user') | |
530 | sys.exit(-1) |
|
527 | sys.exit(-1) | |
531 |
|
528 | |||
532 | if retries == 0: |
|
529 | if retries == 0: | |
533 | sys.exit('max retries reached') |
|
530 | sys.exit('max retries reached') | |
534 | if not path_ok: |
|
531 | if not path_ok: | |
535 | retries -= 1 |
|
532 | retries -= 1 | |
536 | return self.config_prompt(test_repo_path, retries) |
|
533 | return self.config_prompt(test_repo_path, retries) | |
537 |
|
534 | |||
538 | real_path = os.path.normpath(os.path.realpath(path)) |
|
535 | real_path = os.path.normpath(os.path.realpath(path)) | |
539 |
|
536 | |||
540 | if real_path != os.path.normpath(path): |
|
537 | if real_path != os.path.normpath(path): | |
541 | q = ('Path looks like a symlink, RhodeCode Enterprise will store ' |
|
538 | q = ('Path looks like a symlink, RhodeCode Enterprise will store ' | |
542 | 'given path as %s ? [y/n]') % (real_path,) |
|
539 | 'given path as %s ? [y/n]') % (real_path,) | |
543 | if not self.ask_ok(q): |
|
540 | if not self.ask_ok(q): | |
544 | log.error('Canceled by user') |
|
541 | log.error('Canceled by user') | |
545 | sys.exit(-1) |
|
542 | sys.exit(-1) | |
546 |
|
543 | |||
547 | return real_path |
|
544 | return real_path | |
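# A small illustration of the symlink check above (paths are made up):
# if /repos is a symlink to /srv/storage/repos, then
#   os.path.realpath('/repos')  ->  '/srv/storage/repos'
# and the user is asked whether the resolved target should be stored instead
# of the path they typed.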
548 |
|
545 | |||
549 | def create_settings(self, path): |
|
546 | def create_settings(self, path): | |
550 |
|
547 | |||
551 | self.create_ui_settings(path) |
|
548 | self.create_ui_settings(path) | |
552 |
|
549 | |||
553 | ui_config = [ |
|
550 | ui_config = [ | |
554 | ('web', 'push_ssl', 'False'), |
|
551 | ('web', 'push_ssl', 'False'), | |
555 | ('web', 'allow_archive', 'gz zip bz2'), |
|
552 | ('web', 'allow_archive', 'gz zip bz2'), | |
556 | ('web', 'allow_push', '*'), |
|
553 | ('web', 'allow_push', '*'), | |
557 | ('web', 'baseurl', '/'), |
|
554 | ('web', 'baseurl', '/'), | |
558 | ('paths', '/', path), |
|
555 | ('paths', '/', path), | |
559 | ('phases', 'publish', 'True') |
|
556 | ('phases', 'publish', 'True') | |
560 | ] |
|
557 | ] | |
561 | for section, key, value in ui_config: |
|
558 | for section, key, value in ui_config: | |
562 | ui_conf = RhodeCodeUi() |
|
559 | ui_conf = RhodeCodeUi() | |
563 | setattr(ui_conf, 'ui_section', section) |
|
560 | setattr(ui_conf, 'ui_section', section) | |
564 | setattr(ui_conf, 'ui_key', key) |
|
561 | setattr(ui_conf, 'ui_key', key) | |
565 | setattr(ui_conf, 'ui_value', value) |
|
562 | setattr(ui_conf, 'ui_value', value) | |
566 | self.sa.add(ui_conf) |
|
563 | self.sa.add(ui_conf) | |
567 |
|
564 | |||
568 | # rhodecode app settings |
|
565 | # rhodecode app settings | |
569 | settings = [ |
|
566 | settings = [ | |
570 | ('realm', 'RhodeCode', 'unicode'), |
|
567 | ('realm', 'RhodeCode', 'unicode'), | |
571 | ('title', '', 'unicode'), |
|
568 | ('title', '', 'unicode'), | |
572 | ('pre_code', '', 'unicode'), |
|
569 | ('pre_code', '', 'unicode'), | |
573 | ('post_code', '', 'unicode'), |
|
570 | ('post_code', '', 'unicode'), | |
574 | ('show_public_icon', True, 'bool'), |
|
571 | ('show_public_icon', True, 'bool'), | |
575 | ('show_private_icon', True, 'bool'), |
|
572 | ('show_private_icon', True, 'bool'), | |
576 | ('stylify_metatags', False, 'bool'), |
|
573 | ('stylify_metatags', False, 'bool'), | |
577 | ('dashboard_items', 100, 'int'), |
|
574 | ('dashboard_items', 100, 'int'), | |
578 | ('admin_grid_items', 25, 'int'), |
|
575 | ('admin_grid_items', 25, 'int'), | |
579 | ('show_version', True, 'bool'), |
|
576 | ('show_version', True, 'bool'), | |
580 | ('use_gravatar', False, 'bool'), |
|
577 | ('use_gravatar', False, 'bool'), | |
581 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), |
|
578 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), | |
582 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), |
|
579 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), | |
583 | ('support_url', '', 'unicode'), |
|
580 | ('support_url', '', 'unicode'), | |
584 | ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'), |
|
581 | ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'), | |
585 | ('show_revision_number', True, 'bool'), |
|
582 | ('show_revision_number', True, 'bool'), | |
586 | ('show_sha_length', 12, 'int'), |
|
583 | ('show_sha_length', 12, 'int'), | |
587 | ] |
|
584 | ] | |
588 |
|
585 | |||
589 | for key, val, type_ in settings: |
|
586 | for key, val, type_ in settings: | |
590 | sett = RhodeCodeSetting(key, val, type_) |
|
587 | sett = RhodeCodeSetting(key, val, type_) | |
591 | self.sa.add(sett) |
|
588 | self.sa.add(sett) | |
592 |
|
589 | |||
593 | self.create_auth_plugin_options() |
|
590 | self.create_auth_plugin_options() | |
594 | self.create_default_options() |
|
591 | self.create_default_options() | |
595 |
|
592 | |||
596 | log.info('created ui config') |
|
593 | log.info('created ui config') | |
597 |
|
594 | |||
598 | def create_user(self, username, password, email='', admin=False, |
|
595 | def create_user(self, username, password, email='', admin=False, | |
599 | strict_creation_check=True, api_key=None): |
|
596 | strict_creation_check=True, api_key=None): | |
600 | log.info('creating user `%s`', username) |
|
597 | log.info('creating user `%s`', username) | |
601 | user = UserModel().create_or_update( |
|
598 | user = UserModel().create_or_update( | |
602 | username, password, email, firstname=u'RhodeCode', lastname=u'Admin', |
|
599 | username, password, email, firstname=u'RhodeCode', lastname=u'Admin', | |
603 | active=True, admin=admin, extern_type="rhodecode", |
|
600 | active=True, admin=admin, extern_type="rhodecode", | |
604 | strict_creation_check=strict_creation_check) |
|
601 | strict_creation_check=strict_creation_check) | |
605 |
|
602 | |||
606 | if api_key: |
|
603 | if api_key: | |
607 | log.info('setting a new default auth token for user `%s`', username) |
|
604 | log.info('setting a new default auth token for user `%s`', username) | |
608 | UserModel().add_auth_token( |
|
605 | UserModel().add_auth_token( | |
609 | user=user, lifetime_minutes=-1, |
|
606 | user=user, lifetime_minutes=-1, | |
610 | role=UserModel.auth_token_role.ROLE_ALL, |
|
607 | role=UserModel.auth_token_role.ROLE_ALL, | |
611 | description=u'BUILTIN TOKEN') |
|
608 | description=u'BUILTIN TOKEN') | |
612 |
|
609 | |||
613 | def create_default_user(self): |
|
610 | def create_default_user(self): | |
614 | log.info('creating default user') |
|
611 | log.info('creating default user') | |
615 | # create default user for handling default permissions. |
|
612 | # create default user for handling default permissions. | |
616 | user = UserModel().create_or_update(username=User.DEFAULT_USER, |
|
613 | user = UserModel().create_or_update(username=User.DEFAULT_USER, | |
617 | password=str(uuid.uuid1())[:20], |
|
614 | password=str(uuid.uuid1())[:20], | |
618 | email=User.DEFAULT_USER_EMAIL, |
|
615 | email=User.DEFAULT_USER_EMAIL, | |
619 | firstname=u'Anonymous', |
|
616 | firstname=u'Anonymous', | |
620 | lastname=u'User', |
|
617 | lastname=u'User', | |
621 | strict_creation_check=False) |
|
618 | strict_creation_check=False) | |
622 | # based on configuration options activate/de-activate this user which |
|
619 | # based on configuration options activate/de-activate this user which | |
623 | # controls anonymous access |

620 | # controls anonymous access | 
624 | if self.cli_args.get('public_access') is False: |
|
621 | if self.cli_args.get('public_access') is False: | |
625 | log.info('Public access disabled') |
|
622 | log.info('Public access disabled') | |
626 | user.active = False |
|
623 | user.active = False | |
627 | Session().add(user) |
|
624 | Session().add(user) | |
628 | Session().commit() |
|
625 | Session().commit() | |
629 |
|
626 | |||
630 | def create_permissions(self): |
|
627 | def create_permissions(self): | |
631 | """ |
|
628 | """ | |
632 | Creates all permissions defined in the system |
|
629 | Creates all permissions defined in the system | |
633 | """ |
|
630 | """ | |
634 | # module.(access|create|change|delete)_[name] |
|
631 | # module.(access|create|change|delete)_[name] | |
635 | # module.(none|read|write|admin) |
|
632 | # module.(none|read|write|admin) | |
636 | log.info('creating permissions') |
|
633 | log.info('creating permissions') | |
637 | PermissionModel(self.sa).create_permissions() |
|
634 | PermissionModel(self.sa).create_permissions() | |
638 |
|
635 | |||
639 | def populate_default_permissions(self): |
|
636 | def populate_default_permissions(self): | |
640 | """ |
|
637 | """ | |
641 | Populate default permissions. It will create only the default |
|
638 | Populate default permissions. It will create only the default | |
642 | permissions that are missing, and not alter already defined ones |
|
639 | permissions that are missing, and not alter already defined ones | |
643 | """ |
|
640 | """ | |
644 | log.info('creating default user permissions') |
|
641 | log.info('creating default user permissions') | |
645 | PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER) |
|
642 | PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER) |
@@ -1,1084 +1,1084 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import time |
|
22 | import time | |
23 | import logging |
|
23 | import logging | |
24 | import datetime |
|
24 | import datetime | |
25 | import traceback |
|
25 | import traceback | |
26 | import hashlib |
|
26 | import hashlib | |
27 | import collections |
|
27 | import collections | |
28 |
|
28 | |||
29 | from sqlalchemy import * |
|
29 | from sqlalchemy import * | |
30 | from sqlalchemy.ext.hybrid import hybrid_property |
|
30 | from sqlalchemy.ext.hybrid import hybrid_property | |
31 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
31 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates | |
32 | from sqlalchemy.exc import DatabaseError |
|
32 | from sqlalchemy.exc import DatabaseError | |
33 | from beaker.cache import cache_region, region_invalidate |
|
33 | from beaker.cache import cache_region, region_invalidate | |
34 | from webob.exc import HTTPNotFound |
|
34 | from webob.exc import HTTPNotFound | |
35 |
|
35 | |||
36 | from rhodecode.translation import _ |
|
36 | from rhodecode.translation import _ | |
37 |
|
37 | |||
38 | from rhodecode.lib.vcs import get_backend |
|
38 | from rhodecode.lib.vcs import get_backend | |
39 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
39 | from rhodecode.lib.vcs.utils.helpers import get_scm | |
40 | from rhodecode.lib.vcs.exceptions import VCSError |
|
40 | from rhodecode.lib.vcs.exceptions import VCSError | |
41 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
41 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
42 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
42 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
43 |
|
43 | |||
44 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, \ |
|
44 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, \ | |
45 | safe_unicode, remove_suffix, remove_prefix, time_to_datetime |
|
45 | safe_unicode, remove_suffix, remove_prefix, time_to_datetime | |
46 | from rhodecode.lib.ext_json import json |
|
46 | from rhodecode.lib.ext_json import json | |
47 | from rhodecode.lib.caching_query import FromCache |
|
47 | from rhodecode.lib.caching_query import FromCache | |
48 |
|
48 | |||
49 | from rhodecode.model.meta import Base, Session |
|
49 | from rhodecode.model.meta import Base, Session | |
50 |
|
50 | |||
51 | URL_SEP = '/' |
|
51 | URL_SEP = '/' | |
52 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
53 |
|
53 | |||
54 | #============================================================================== |
|
54 | #============================================================================== | |
55 | # BASE CLASSES |
|
55 | # BASE CLASSES | |
56 | #============================================================================== |
|
56 | #============================================================================== | |
57 |
|
57 | |||
58 | _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest() |
|
58 | _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest() | |
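# Quick worked example of the cache-key helper (the repository name is arbitrary):
#   _hash_key(u'group/repo') == hashlib.md5(safe_str(u'group/repo')).hexdigest()
# i.e. a stable 32-character hex digest, safe to use as a short cache key.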
59 |
|
59 | |||
60 |
|
60 | |||
61 | class BaseModel(object): |
|
61 | class BaseModel(object): | |
62 | """ |
|
62 | """ | |
63 | Base Model for all classes |
|
63 | Base Model for all classes | |
64 | """ |
|
64 | """ | |
65 |
|
65 | |||
66 | @classmethod |
|
66 | @classmethod | |
67 | def _get_keys(cls): |
|
67 | def _get_keys(cls): | |
68 | """return column names for this model """ |
|
68 | """return column names for this model """ | |
69 | return class_mapper(cls).c.keys() |
|
69 | return class_mapper(cls).c.keys() | |
70 |
|
70 | |||
71 | def get_dict(self): |
|
71 | def get_dict(self): | |
72 | """ |
|
72 | """ | |
73 | return dict with keys and values corresponding |
|
73 | return dict with keys and values corresponding | |
74 | to this model data """ |
|
74 | to this model data """ | |
75 |
|
75 | |||
76 | d = {} |
|
76 | d = {} | |
77 | for k in self._get_keys(): |
|
77 | for k in self._get_keys(): | |
78 | d[k] = getattr(self, k) |
|
78 | d[k] = getattr(self, k) | |
79 |
|
79 | |||
80 | # also use __json__() if present to get additional fields |
|
80 | # also use __json__() if present to get additional fields | |
81 | _json_attr = getattr(self, '__json__', None) |
|
81 | _json_attr = getattr(self, '__json__', None) | |
82 | if _json_attr: |
|
82 | if _json_attr: | |
83 | # update with attributes from __json__ |
|
83 | # update with attributes from __json__ | |
84 | if callable(_json_attr): |
|
84 | if callable(_json_attr): | |
85 | _json_attr = _json_attr() |
|
85 | _json_attr = _json_attr() | |
86 | for k, val in _json_attr.iteritems(): |
|
86 | for k, val in _json_attr.iteritems(): | |
87 | d[k] = val |
|
87 | d[k] = val | |
88 | return d |
|
88 | return d | |
89 |
|
89 | |||
90 | def get_appstruct(self): |
|
90 | def get_appstruct(self): | |
91 | """return list with keys and values tupples corresponding |
|
91 | """return list with keys and values tupples corresponding | |
92 | to this model data """ |
|
92 | to this model data """ | |
93 |
|
93 | |||
94 | l = [] |
|
94 | l = [] | |
95 | for k in self._get_keys(): |
|
95 | for k in self._get_keys(): | |
96 | l.append((k, getattr(self, k),)) |
|
96 | l.append((k, getattr(self, k),)) | |
97 | return l |
|
97 | return l | |
98 |
|
98 | |||
99 | def populate_obj(self, populate_dict): |
|
99 | def populate_obj(self, populate_dict): | |
100 | """populate model with data from given populate_dict""" |
|
100 | """populate model with data from given populate_dict""" | |
101 |
|
101 | |||
102 | for k in self._get_keys(): |
|
102 | for k in self._get_keys(): | |
103 | if k in populate_dict: |
|
103 | if k in populate_dict: | |
104 | setattr(self, k, populate_dict[k]) |
|
104 | setattr(self, k, populate_dict[k]) | |
105 |
|
105 | |||
106 | @classmethod |
|
106 | @classmethod | |
107 | def query(cls): |
|
107 | def query(cls): | |
108 | return Session().query(cls) |
|
108 | return Session().query(cls) | |
109 |
|
109 | |||
110 | @classmethod |
|
110 | @classmethod | |
111 | def get(cls, id_): |
|
111 | def get(cls, id_): | |
112 | if id_: |
|
112 | if id_: | |
113 | return cls.query().get(id_) |
|
113 | return cls.query().get(id_) | |
114 |
|
114 | |||
115 | @classmethod |
|
115 | @classmethod | |
116 | def get_or_404(cls, id_): |
|
116 | def get_or_404(cls, id_): | |
117 | try: |
|
117 | try: | |
118 | id_ = int(id_) |
|
118 | id_ = int(id_) | |
119 | except (TypeError, ValueError): |
|
119 | except (TypeError, ValueError): | |
120 | raise HTTPNotFound |
|
120 | raise HTTPNotFound | |
121 |
|
121 | |||
122 | res = cls.query().get(id_) |
|
122 | res = cls.query().get(id_) | |
123 | if not res: |
|
123 | if not res: | |
124 | raise HTTPNotFound |
|
124 | raise HTTPNotFound | |
125 | return res |
|
125 | return res | |
126 |
|
126 | |||
127 | @classmethod |
|
127 | @classmethod | |
128 | def getAll(cls): |
|
128 | def getAll(cls): | |
129 | # deprecated and left for backward compatibility |
|
129 | # deprecated and left for backward compatibility | |
130 | return cls.get_all() |
|
130 | return cls.get_all() | |
131 |
|
131 | |||
132 | @classmethod |
|
132 | @classmethod | |
133 | def get_all(cls): |
|
133 | def get_all(cls): | |
134 | return cls.query().all() |
|
134 | return cls.query().all() | |
135 |
|
135 | |||
136 | @classmethod |
|
136 | @classmethod | |
137 | def delete(cls, id_): |
|
137 | def delete(cls, id_): | |
138 | obj = cls.query().get(id_) |
|
138 | obj = cls.query().get(id_) | |
139 | Session().delete(obj) |
|
139 | Session().delete(obj) | |
140 |
|
140 | |||
141 | def __repr__(self): |
|
141 | def __repr__(self): | |
142 | if hasattr(self, '__unicode__'): |
|
142 | if hasattr(self, '__unicode__'): | |
143 | # python repr needs to return str |
|
143 | # python repr needs to return str | |
144 | return safe_str(self.__unicode__()) |
|
144 | return safe_str(self.__unicode__()) | |
145 | return '<DB:%s>' % (self.__class__.__name__) |
|
145 | return '<DB:%s>' % (self.__class__.__name__) | |
146 |
|
146 | |||
147 |
|
147 | |||
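
The helpers above are inherited by every model in this file. A minimal usage sketch (not part of the original module; it assumes the application has already configured the SQLAlchemy Session and that the User model defined further down exists in the database):

    # illustrative only -- requires a bootstrapped RhodeCode database session
    user = User.get_or_404(1)               # raises HTTPNotFound for missing/non-integer ids
    data = user.get_dict()                  # column names -> values, plus __json__() extras
    pairs = user.get_appstruct()            # the same data as a list of (key, value) tuples
    user.populate_obj({'active': False})    # only keys that match columns are applied
    Session().add(user)
    Session().commit()
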
148 | class RhodeCodeSetting(Base, BaseModel): |
|
148 | class RhodeCodeSetting(Base, BaseModel): | |
149 | __tablename__ = 'rhodecode_settings' |
|
149 | __tablename__ = 'rhodecode_settings' | |
150 | __table_args__ = ( |
|
150 | __table_args__ = ( | |
151 | UniqueConstraint('app_settings_name'), |
|
151 | UniqueConstraint('app_settings_name'), | |
152 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
152 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
153 | 'mysql_charset': 'utf8'} |
|
153 | 'mysql_charset': 'utf8'} | |
154 | ) |
|
154 | ) | |
155 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
155 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
156 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) |
|
156 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) | |
157 | _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None) |
|
157 | _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None) | |
158 |
|
158 | |||
159 | def __init__(self, k='', v=''): |
|
159 | def __init__(self, k='', v=''): | |
160 | self.app_settings_name = k |
|
160 | self.app_settings_name = k | |
161 | self.app_settings_value = v |
|
161 | self.app_settings_value = v | |
162 |
|
162 | |||
163 | @validates('_app_settings_value') |
|
163 | @validates('_app_settings_value') | |
164 | def validate_settings_value(self, key, val): |
|
164 | def validate_settings_value(self, key, val): | |
165 | assert type(val) == unicode |
|
165 | assert type(val) == unicode | |
166 | return val |
|
166 | return val | |
167 |
|
167 | |||
168 | @hybrid_property |
|
168 | @hybrid_property | |
169 | def app_settings_value(self): |
|
169 | def app_settings_value(self): | |
170 | v = self._app_settings_value |
|
170 | v = self._app_settings_value | |
171 | if self.app_settings_name in ["ldap_active", |
|
171 | if self.app_settings_name in ["ldap_active", | |
172 | "default_repo_enable_statistics", |
|
172 | "default_repo_enable_statistics", | |
173 | "default_repo_enable_locking", |
|
173 | "default_repo_enable_locking", | |
174 | "default_repo_private", |
|
174 | "default_repo_private", | |
175 | "default_repo_enable_downloads"]: |
|
175 | "default_repo_enable_downloads"]: | |
176 | v = str2bool(v) |
|
176 | v = str2bool(v) | |
177 | return v |
|
177 | return v | |
178 |
|
178 | |||
179 | @app_settings_value.setter |
|
179 | @app_settings_value.setter | |
180 | def app_settings_value(self, val): |
|
180 | def app_settings_value(self, val): | |
181 | """ |
|
181 | """ | |
182 | Setter that will always make sure we use unicode in app_settings_value |
|
182 | Setter that will always make sure we use unicode in app_settings_value | |
183 |
|
183 | |||
184 | :param val: |
|
184 | :param val: | |
185 | """ |
|
185 | """ | |
186 | self._app_settings_value = safe_unicode(val) |
|
186 | self._app_settings_value = safe_unicode(val) | |
187 |
|
187 | |||
188 | def __unicode__(self): |
|
188 | def __unicode__(self): | |
189 | return u"<%s('%s:%s')>" % ( |
|
189 | return u"<%s('%s:%s')>" % ( | |
190 | self.__class__.__name__, |
|
190 | self.__class__.__name__, | |
191 | self.app_settings_name, self.app_settings_value |
|
191 | self.app_settings_name, self.app_settings_value | |
192 | ) |
|
192 | ) | |
193 |
|
193 | |||
194 |
|
194 | |||
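
A short, illustrative sketch of the hybrid property above: values are always persisted as unicode, and the whitelist in the getter turns the listed settings back into booleans on read (other setting names are returned as stored):

    setting = RhodeCodeSetting('ldap_active', 'true')
    setting.app_settings_value      # -> True, coerced through str2bool()
    title = RhodeCodeSetting('title', u'RhodeCode')
    title.app_settings_value        # -> u'RhodeCode', returned as stored
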
195 | class RhodeCodeUi(Base, BaseModel): |
|
195 | class RhodeCodeUi(Base, BaseModel): | |
196 | __tablename__ = 'rhodecode_ui' |
|
196 | __tablename__ = 'rhodecode_ui' | |
197 | __table_args__ = ( |
|
197 | __table_args__ = ( | |
198 | UniqueConstraint('ui_key'), |
|
198 | UniqueConstraint('ui_key'), | |
199 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
199 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
200 | 'mysql_charset': 'utf8'} |
|
200 | 'mysql_charset': 'utf8'} | |
201 | ) |
|
201 | ) | |
202 |
|
202 | |||
203 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
203 | HOOK_REPO_SIZE = 'changegroup.repo_size' | |
204 | HOOK_PUSH = 'changegroup.push_logger' |
|
204 | HOOK_PUSH = 'changegroup.push_logger' | |
205 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' |
|
205 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' | |
206 | HOOK_PULL = 'outgoing.pull_logger' |
|
206 | HOOK_PULL = 'outgoing.pull_logger' | |
207 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' |
|
207 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' | |
208 |
|
208 | |||
209 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
209 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
210 | ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None) |
|
210 | ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None) | |
211 | ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None) |
|
211 | ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None) | |
212 | ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None) |
|
212 | ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None) | |
213 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
213 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) | |
214 |
|
214 | |||
215 |
|
215 | |||
216 |
|
216 | |||
217 | class User(Base, BaseModel): |
|
217 | class User(Base, BaseModel): | |
218 | __tablename__ = 'users' |
|
218 | __tablename__ = 'users' | |
219 | __table_args__ = ( |
|
219 | __table_args__ = ( | |
220 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
220 | UniqueConstraint('username'), UniqueConstraint('email'), | |
221 | Index('u_username_idx', 'username'), |
|
221 | Index('u_username_idx', 'username'), | |
222 | Index('u_email_idx', 'email'), |
|
222 | Index('u_email_idx', 'email'), | |
223 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
223 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
224 | 'mysql_charset': 'utf8'} |
|
224 | 'mysql_charset': 'utf8'} | |
225 | ) |
|
225 | ) | |
226 | DEFAULT_USER = 'default' |
|
226 | DEFAULT_USER = 'default' | |
227 | DEFAULT_PERMISSIONS = [ |
|
227 | DEFAULT_PERMISSIONS = [ | |
228 | 'hg.register.manual_activate', 'hg.create.repository', |
|
228 | 'hg.register.manual_activate', 'hg.create.repository', | |
229 | 'hg.fork.repository', 'repository.read', 'group.read' |
|
229 | 'hg.fork.repository', 'repository.read', 'group.read' | |
230 | ] |
|
230 | ] | |
231 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
231 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
232 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
232 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |
233 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
233 | password = Column("password", String(255), nullable=True, unique=None, default=None) | |
234 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
234 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |
235 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
235 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | |
236 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) |
|
236 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) | |
237 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
237 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) | |
238 | _email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
238 | _email = Column("email", String(255), nullable=True, unique=None, default=None) | |
239 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
239 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
240 | ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None) |
|
240 | ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None) | |
241 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
241 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) | |
242 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
242 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |
243 |
|
243 | |||
244 | user_log = relationship('UserLog') |
|
244 | user_log = relationship('UserLog') | |
245 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
245 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') | |
246 |
|
246 | |||
247 | repositories = relationship('Repository') |
|
247 | repositories = relationship('Repository') | |
248 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
248 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') | |
249 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') |
|
249 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') | |
250 |
|
250 | |||
251 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
251 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') | |
252 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') |
|
252 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') | |
253 |
|
253 | |||
254 | group_member = relationship('UserGroupMember', cascade='all') |
|
254 | group_member = relationship('UserGroupMember', cascade='all') | |
255 |
|
255 | |||
256 | notifications = relationship('UserNotification', cascade='all') |
|
256 | notifications = relationship('UserNotification', cascade='all') | |
257 | # notifications assigned to this user |
|
257 | # notifications assigned to this user | |
258 | user_created_notifications = relationship('Notification', cascade='all') |
|
258 | user_created_notifications = relationship('Notification', cascade='all') | |
259 | # comments created by this user |
|
259 | # comments created by this user | |
260 | user_comments = relationship('ChangesetComment', cascade='all') |
|
260 | user_comments = relationship('ChangesetComment', cascade='all') | |
261 | user_emails = relationship('UserEmailMap', cascade='all') |
|
261 | user_emails = relationship('UserEmailMap', cascade='all') | |
262 |
|
262 | |||
263 | @hybrid_property |
|
263 | @hybrid_property | |
264 | def email(self): |
|
264 | def email(self): | |
265 | return self._email |
|
265 | return self._email | |
266 |
|
266 | |||
267 | @email.setter |
|
267 | @email.setter | |
268 | def email(self, val): |
|
268 | def email(self, val): | |
269 | self._email = val.lower() if val else None |
|
269 | self._email = val.lower() if val else None | |
270 |
|
270 | |||
271 | @property |
|
271 | @property | |
272 | def firstname(self): |
|
272 | def firstname(self): | |
273 | # alias for future |
|
273 | # alias for future | |
274 | return self.name |
|
274 | return self.name | |
275 |
|
275 | |||
276 | @property |
|
276 | @property | |
277 | def username_and_name(self): |
|
277 | def username_and_name(self): | |
278 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) |
|
278 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) | |
279 |
|
279 | |||
280 | @property |
|
280 | @property | |
281 | def full_name(self): |
|
281 | def full_name(self): | |
282 | return '%s %s' % (self.firstname, self.lastname) |
|
282 | return '%s %s' % (self.firstname, self.lastname) | |
283 |
|
283 | |||
284 | @property |
|
284 | @property | |
285 | def full_contact(self): |
|
285 | def full_contact(self): | |
286 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) |
|
286 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) | |
287 |
|
287 | |||
288 | @property |
|
288 | @property | |
289 | def short_contact(self): |
|
289 | def short_contact(self): | |
290 | return '%s %s' % (self.firstname, self.lastname) |
|
290 | return '%s %s' % (self.firstname, self.lastname) | |
291 |
|
291 | |||
292 | @property |
|
292 | @property | |
293 | def is_admin(self): |
|
293 | def is_admin(self): | |
294 | return self.admin |
|
294 | return self.admin | |
295 |
|
295 | |||
296 | @classmethod |
|
296 | @classmethod | |
297 | def get_by_username(cls, username, case_insensitive=False, cache=False): |
|
297 | def get_by_username(cls, username, case_insensitive=False, cache=False): | |
298 | if case_insensitive: |
|
298 | if case_insensitive: | |
299 | q = cls.query().filter(cls.username.ilike(username)) |
|
299 | q = cls.query().filter(cls.username.ilike(username)) | |
300 | else: |
|
300 | else: | |
301 | q = cls.query().filter(cls.username == username) |
|
301 | q = cls.query().filter(cls.username == username) | |
302 |
|
302 | |||
303 | if cache: |
|
303 | if cache: | |
304 | q = q.options(FromCache( |
|
304 | q = q.options(FromCache( | |
305 | "sql_cache_short", |
|
305 | "sql_cache_short", | |
306 | "get_user_%s" % _hash_key(username) |
|
306 | "get_user_%s" % _hash_key(username) | |
307 | ) |
|
307 | ) | |
308 | ) |
|
308 | ) | |
309 | return q.scalar() |
|
309 | return q.scalar() | |
310 |
|
310 | |||
311 | @classmethod |
|
311 | @classmethod | |
312 | def get_by_auth_token(cls, auth_token, cache=False): |
|
312 | def get_by_auth_token(cls, auth_token, cache=False): | |
313 | q = cls.query().filter(cls.api_key == auth_token) |
|
313 | q = cls.query().filter(cls.api_key == auth_token) | |
314 |
|
314 | |||
315 | if cache: |
|
315 | if cache: | |
316 | q = q.options(FromCache("sql_cache_short", |
|
316 | q = q.options(FromCache("sql_cache_short", | |
317 | "get_auth_token_%s" % auth_token)) |
|
317 | "get_auth_token_%s" % auth_token)) | |
318 | return q.scalar() |
|
318 | return q.scalar() | |
319 |
|
319 | |||
320 | @classmethod |
|
320 | @classmethod | |
321 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
321 | def get_by_email(cls, email, case_insensitive=False, cache=False): | |
322 | if case_insensitive: |
|
322 | if case_insensitive: | |
323 | q = cls.query().filter(cls.email.ilike(email)) |
|
323 | q = cls.query().filter(cls.email.ilike(email)) | |
324 | else: |
|
324 | else: | |
325 | q = cls.query().filter(cls.email == email) |
|
325 | q = cls.query().filter(cls.email == email) | |
326 |
|
326 | |||
327 | if cache: |
|
327 | if cache: | |
328 | q = q.options(FromCache("sql_cache_short", |
|
328 | q = q.options(FromCache("sql_cache_short", | |
329 | "get_email_key_%s" % email)) |
|
329 | "get_email_key_%s" % email)) | |
330 |
|
330 | |||
331 | ret = q.scalar() |
|
331 | ret = q.scalar() | |
332 | if ret is None: |
|
332 | if ret is None: | |
333 | q = UserEmailMap.query() |
|
333 | q = UserEmailMap.query() | |
334 | # try fetching in alternate email map |
|
334 | # try fetching in alternate email map | |
335 | if case_insensitive: |
|
335 | if case_insensitive: | |
336 | q = q.filter(UserEmailMap.email.ilike(email)) |
|
336 | q = q.filter(UserEmailMap.email.ilike(email)) | |
337 | else: |
|
337 | else: | |
338 | q = q.filter(UserEmailMap.email == email) |
|
338 | q = q.filter(UserEmailMap.email == email) | |
339 | q = q.options(joinedload(UserEmailMap.user)) |
|
339 | q = q.options(joinedload(UserEmailMap.user)) | |
340 | if cache: |
|
340 | if cache: | |
341 | q = q.options(FromCache("sql_cache_short", |
|
341 | q = q.options(FromCache("sql_cache_short", | |
342 | "get_email_map_key_%s" % email)) |
|
342 | "get_email_map_key_%s" % email)) | |
343 | ret = getattr(q.scalar(), 'user', None) |
|
343 | ret = getattr(q.scalar(), 'user', None) | |
344 |
|
344 | |||
345 | return ret |
|
345 | return ret | |
346 |
|
346 | |||
347 |
|
347 | |||
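
An illustrative sketch of the lookup helpers above (the username and address are invented; cache=True assumes the 'sql_cache_short' cache region is configured, as in a standard RhodeCode setup):

    # case-insensitive, cached lookup keyed on _hash_key(username)
    admin = User.get_by_username('Admin', case_insensitive=True, cache=True)

    # falls back to UserEmailMap when the address is not the account's main email
    owner = User.get_by_email('someone@example.com')

    # the built-in anonymous account
    default = User.get_by_username(User.DEFAULT_USER)
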
348 | class UserEmailMap(Base, BaseModel): |
|
348 | class UserEmailMap(Base, BaseModel): | |
349 | __tablename__ = 'user_email_map' |
|
349 | __tablename__ = 'user_email_map' | |
350 | __table_args__ = ( |
|
350 | __table_args__ = ( | |
351 | Index('uem_email_idx', 'email'), |
|
351 | Index('uem_email_idx', 'email'), | |
352 | UniqueConstraint('email'), |
|
352 | UniqueConstraint('email'), | |
353 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
353 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
354 | 'mysql_charset': 'utf8'} |
|
354 | 'mysql_charset': 'utf8'} | |
355 | ) |
|
355 | ) | |
356 | __mapper_args__ = {} |
|
356 | __mapper_args__ = {} | |
357 |
|
357 | |||
358 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
358 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
359 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
359 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
360 | _email = Column("email", String(255), nullable=True, unique=False, default=None) |
|
360 | _email = Column("email", String(255), nullable=True, unique=False, default=None) | |
361 | user = relationship('User', lazy='joined') |
|
361 | user = relationship('User', lazy='joined') | |
362 |
|
362 | |||
363 | @validates('_email') |
|
363 | @validates('_email') | |
364 | def validate_email(self, key, email): |
|
364 | def validate_email(self, key, email): | |
365 | # check that this email is not already the account's main email |
|
365 | # check that this email is not already the account's main email | |
366 | main_email = Session().query(User).filter(User.email == email).scalar() |
|
366 | main_email = Session().query(User).filter(User.email == email).scalar() | |
367 | if main_email is not None: |
|
367 | if main_email is not None: | |
368 | raise AttributeError('email %s is already present in the user table' % email) |
|
368 | raise AttributeError('email %s is already present in the user table' % email) | |
369 | return email |
|
369 | return email | |
370 |
|
370 | |||
371 | @hybrid_property |
|
371 | @hybrid_property | |
372 | def email(self): |
|
372 | def email(self): | |
373 | return self._email |
|
373 | return self._email | |
374 |
|
374 | |||
375 | @email.setter |
|
375 | @email.setter | |
376 | def email(self, val): |
|
376 | def email(self, val): | |
377 | self._email = val.lower() if val else None |
|
377 | self._email = val.lower() if val else None | |
378 |
|
378 | |||
379 |
|
379 | |||
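
A hedged example of the extra-email mapping above (the id and address are invented): the hybrid setter lower-cases the address before it is stored, and the @validates('_email') hook rejects any address that already exists as a main email in the users table by raising AttributeError:

    extra = UserEmailMap()
    extra.user_id = 2                    # assumed: id of an existing, non-default user
    extra.email = 'Alias@Example.COM'    # persisted as u'alias@example.com'
    Session().add(extra)
    Session().commit()
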
380 | class UserIpMap(Base, BaseModel): |
|
380 | class UserIpMap(Base, BaseModel): | |
381 | __tablename__ = 'user_ip_map' |
|
381 | __tablename__ = 'user_ip_map' | |
382 | __table_args__ = ( |
|
382 | __table_args__ = ( | |
383 | UniqueConstraint('user_id', 'ip_addr'), |
|
383 | UniqueConstraint('user_id', 'ip_addr'), | |
384 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
384 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
385 | 'mysql_charset': 'utf8'} |
|
385 | 'mysql_charset': 'utf8'} | |
386 | ) |
|
386 | ) | |
387 | __mapper_args__ = {} |
|
387 | __mapper_args__ = {} | |
388 |
|
388 | |||
389 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
389 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
390 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
390 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
391 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) |
|
391 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) | |
392 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
392 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |
393 | user = relationship('User', lazy='joined') |
|
393 | user = relationship('User', lazy='joined') | |
394 |
|
394 | |||
395 |
|
395 | |||
396 | class UserLog(Base, BaseModel): |
|
396 | class UserLog(Base, BaseModel): | |
397 | __tablename__ = 'user_logs' |
|
397 | __tablename__ = 'user_logs' | |
398 | __table_args__ = ( |
|
398 | __table_args__ = ( | |
399 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
399 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
400 | 'mysql_charset': 'utf8'}, |
|
400 | 'mysql_charset': 'utf8'}, | |
401 | ) |
|
401 | ) | |
402 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
402 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
403 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
403 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
404 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
404 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |
405 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
|
405 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) | |
406 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) |
|
406 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) | |
407 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) |
|
407 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) | |
408 | action = Column("action", String(1200000), nullable=True, unique=None, default=None) |
|
408 | action = Column("action", String(1200000), nullable=True, unique=None, default=None) | |
409 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
409 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
410 |
|
410 | |||
411 |
|
411 | |||
412 | user = relationship('User') |
|
412 | user = relationship('User') | |
413 | repository = relationship('Repository', cascade='') |
|
413 | repository = relationship('Repository', cascade='') | |
414 |
|
414 | |||
415 |
|
415 | |||
416 | class UserGroup(Base, BaseModel): |
|
416 | class UserGroup(Base, BaseModel): | |
417 | __tablename__ = 'users_groups' |
|
417 | __tablename__ = 'users_groups' | |
418 | __table_args__ = ( |
|
418 | __table_args__ = ( | |
419 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
419 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
420 | 'mysql_charset': 'utf8'}, |
|
420 | 'mysql_charset': 'utf8'}, | |
421 | ) |
|
421 | ) | |
422 |
|
422 | |||
423 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
423 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
424 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) |
|
424 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) | |
425 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
425 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) | |
426 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
426 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |
427 |
|
427 | |||
428 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
428 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") | |
429 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') |
|
429 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') | |
430 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
430 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |
431 |
|
431 | |||
432 | def __unicode__(self): |
|
432 | def __unicode__(self): | |
433 | return u'<userGroup(%s)>' % (self.users_group_name) |
|
433 | return u'<userGroup(%s)>' % (self.users_group_name) | |
434 |
|
434 | |||
435 | @classmethod |
|
435 | @classmethod | |
436 | def get_by_group_name(cls, group_name, cache=False, |
|
436 | def get_by_group_name(cls, group_name, cache=False, | |
437 | case_insensitive=False): |
|
437 | case_insensitive=False): | |
438 | if case_insensitive: |
|
438 | if case_insensitive: | |
439 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) |
|
439 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) | |
440 | else: |
|
440 | else: | |
441 | q = cls.query().filter(cls.users_group_name == group_name) |
|
441 | q = cls.query().filter(cls.users_group_name == group_name) | |
442 | if cache: |
|
442 | if cache: | |
443 | q = q.options(FromCache( |
|
443 | q = q.options(FromCache( | |
444 | "sql_cache_short", |
|
444 | "sql_cache_short", | |
445 | "get_user_%s" % _hash_key(group_name) |
|
445 | "get_user_%s" % _hash_key(group_name) | |
446 | ) |
|
446 | ) | |
447 | ) |
|
447 | ) | |
448 | return q.scalar() |
|
448 | return q.scalar() | |
449 |
|
449 | |||
450 | @classmethod |
|
450 | @classmethod | |
451 | def get(cls, users_group_id, cache=False): |
|
451 | def get(cls, users_group_id, cache=False): | |
452 | user_group = cls.query() |
|
452 | user_group = cls.query() | |
453 | if cache: |
|
453 | if cache: | |
454 | user_group = user_group.options(FromCache("sql_cache_short", |
|
454 | user_group = user_group.options(FromCache("sql_cache_short", | |
455 | "get_users_group_%s" % users_group_id)) |
|
455 | "get_users_group_%s" % users_group_id)) | |
456 | return user_group.get(users_group_id) |
|
456 | return user_group.get(users_group_id) | |
457 |
|
457 | |||
458 |
|
458 | |||
459 | class UserGroupMember(Base, BaseModel): |
|
459 | class UserGroupMember(Base, BaseModel): | |
460 | __tablename__ = 'users_groups_members' |
|
460 | __tablename__ = 'users_groups_members' | |
461 | __table_args__ = ( |
|
461 | __table_args__ = ( | |
462 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
462 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
463 | 'mysql_charset': 'utf8'}, |
|
463 | 'mysql_charset': 'utf8'}, | |
464 | ) |
|
464 | ) | |
465 |
|
465 | |||
466 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
466 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
467 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
467 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
468 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
468 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
469 |
|
469 | |||
470 | user = relationship('User', lazy='joined') |
|
470 | user = relationship('User', lazy='joined') | |
471 | users_group = relationship('UserGroup') |
|
471 | users_group = relationship('UserGroup') | |
472 |
|
472 | |||
473 | def __init__(self, gr_id='', u_id=''): |
|
473 | def __init__(self, gr_id='', u_id=''): | |
474 | self.users_group_id = gr_id |
|
474 | self.users_group_id = gr_id | |
475 | self.user_id = u_id |
|
475 | self.user_id = u_id | |
476 |
|
476 | |||
477 |
|
477 | |||
478 | class RepositoryField(Base, BaseModel): |
|
478 | class RepositoryField(Base, BaseModel): | |
479 | __tablename__ = 'repositories_fields' |
|
479 | __tablename__ = 'repositories_fields' | |
480 | __table_args__ = ( |
|
480 | __table_args__ = ( | |
481 | UniqueConstraint('repository_id', 'field_key'), # no-multi field |
|
481 | UniqueConstraint('repository_id', 'field_key'), # no-multi field | |
482 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
482 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
483 | 'mysql_charset': 'utf8'}, |
|
483 | 'mysql_charset': 'utf8'}, | |
484 | ) |
|
484 | ) | |
485 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields |
|
485 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields | |
486 |
|
486 | |||
487 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
487 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
488 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
488 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
489 | field_key = Column("field_key", String(250)) |
|
489 | field_key = Column("field_key", String(250)) | |
490 | field_label = Column("field_label", String(1024), nullable=False) |
|
490 | field_label = Column("field_label", String(1024), nullable=False) | |
491 | field_value = Column("field_value", String(10000), nullable=False) |
|
491 | field_value = Column("field_value", String(10000), nullable=False) | |
492 | field_desc = Column("field_desc", String(1024), nullable=False) |
|
492 | field_desc = Column("field_desc", String(1024), nullable=False) | |
493 | field_type = Column("field_type", String(256), nullable=False, unique=None) |
|
493 | field_type = Column("field_type", String(256), nullable=False, unique=None) | |
494 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
494 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
495 |
|
495 | |||
496 | repository = relationship('Repository') |
|
496 | repository = relationship('Repository') | |
497 |
|
497 | |||
498 | @classmethod |
|
498 | @classmethod | |
499 | def get_by_key_name(cls, key, repo): |
|
499 | def get_by_key_name(cls, key, repo): | |
500 | row = cls.query()\ |
|
500 | row = cls.query()\ | |
501 | .filter(cls.repository == repo)\ |
|
501 | .filter(cls.repository == repo)\ | |
502 | .filter(cls.field_key == key).scalar() |
|
502 | .filter(cls.field_key == key).scalar() | |
503 | return row |
|
503 | return row | |
504 |
|
504 | |||
505 |
|
505 | |||
506 | class Repository(Base, BaseModel): |
|
506 | class Repository(Base, BaseModel): | |
507 | __tablename__ = 'repositories' |
|
507 | __tablename__ = 'repositories' | |
508 | __table_args__ = ( |
|
508 | __table_args__ = ( | |
509 | UniqueConstraint('repo_name'), |
|
509 | UniqueConstraint('repo_name'), | |
510 | Index('r_repo_name_idx', 'repo_name'), |
|
510 | Index('r_repo_name_idx', 'repo_name'), | |
511 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
511 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
512 | 'mysql_charset': 'utf8'}, |
|
512 | 'mysql_charset': 'utf8'}, | |
513 | ) |
|
513 | ) | |
514 |
|
514 | |||
515 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
515 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
516 | repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None) |
|
516 | repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None) | |
517 | clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None) |
|
517 | clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None) | |
518 | repo_type = Column("repo_type", String(255), nullable=False, unique=False, default=None) |
|
518 | repo_type = Column("repo_type", String(255), nullable=False, unique=False, default=None) | |
519 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
519 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |
520 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
520 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) | |
521 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
521 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) | |
522 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
522 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) | |
523 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
523 | description = Column("description", String(10000), nullable=True, unique=None, default=None) | |
524 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
524 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
525 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
525 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
526 | landing_rev = Column("landing_revision", String(255), nullable=False, unique=False, default=None) |
|
526 | landing_rev = Column("landing_revision", String(255), nullable=False, unique=False, default=None) | |
527 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
527 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |
528 | _locked = Column("locked", String(255), nullable=True, unique=False, default=None) |
|
528 | _locked = Column("locked", String(255), nullable=True, unique=False, default=None) | |
529 | _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data |
|
529 | _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data | |
530 |
|
530 | |||
531 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
531 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) | |
532 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
532 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) | |
533 |
|
533 | |||
534 | user = relationship('User') |
|
534 | user = relationship('User') | |
535 | fork = relationship('Repository', remote_side=repo_id) |
|
535 | fork = relationship('Repository', remote_side=repo_id) | |
536 | group = relationship('RepoGroup') |
|
536 | group = relationship('RepoGroup') | |
537 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
537 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') | |
538 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
538 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |
539 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
539 | stats = relationship('Statistics', cascade='all', uselist=False) | |
540 |
|
540 | |||
541 | followers = relationship('UserFollowing', |
|
541 | followers = relationship('UserFollowing', | |
542 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', |
|
542 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', | |
543 | cascade='all') |
|
543 | cascade='all') | |
544 | extra_fields = relationship('RepositoryField', |
|
544 | extra_fields = relationship('RepositoryField', | |
545 | cascade="all, delete, delete-orphan") |
|
545 | cascade="all, delete, delete-orphan") | |
546 |
|
546 | |||
547 | logs = relationship('UserLog') |
|
547 | logs = relationship('UserLog') | |
548 | comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan") |
|
548 | comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan") | |
549 |
|
549 | |||
550 | pull_requests_org = relationship('PullRequest', |
|
550 | pull_requests_org = relationship('PullRequest', | |
551 | primaryjoin='PullRequest.org_repo_id==Repository.repo_id', |
|
551 | primaryjoin='PullRequest.org_repo_id==Repository.repo_id', | |
552 | cascade="all, delete, delete-orphan") |
|
552 | cascade="all, delete, delete-orphan") | |
553 |
|
553 | |||
554 | pull_requests_other = relationship('PullRequest', |
|
554 | pull_requests_other = relationship('PullRequest', | |
555 | primaryjoin='PullRequest.other_repo_id==Repository.repo_id', |
|
555 | primaryjoin='PullRequest.other_repo_id==Repository.repo_id', | |
556 | cascade="all, delete, delete-orphan") |
|
556 | cascade="all, delete, delete-orphan") | |
557 |
|
557 | |||
558 | def __unicode__(self): |
|
558 | def __unicode__(self): | |
559 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, |
|
559 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, | |
560 | safe_unicode(self.repo_name)) |
|
560 | safe_unicode(self.repo_name)) | |
561 |
|
561 | |||
562 | #NOTE for this migration we are required to have it |
|
562 | #NOTE for this migration we are required to have it | |
563 | @hybrid_property |
|
563 | @hybrid_property | |
564 | def changeset_cache(self): |
|
564 | def changeset_cache(self): | |
565 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
565 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
566 | dummy = EmptyCommit().__json__() |
|
566 | dummy = EmptyCommit().__json__() | |
567 | if not self._changeset_cache: |
|
567 | if not self._changeset_cache: | |
568 | return dummy |
|
568 | return dummy | |
569 | try: |
|
569 | try: | |
570 | return json.loads(self._changeset_cache) |
|
570 | return json.loads(self._changeset_cache) | |
571 | except TypeError: |
|
571 | except TypeError: | |
572 | return dummy |
|
572 | return dummy | |
573 |
|
573 | |||
574 | @changeset_cache.setter |
|
574 | @changeset_cache.setter | |
575 | def changeset_cache(self, val): |
|
575 | def changeset_cache(self, val): | |
576 | try: |
|
576 | try: | |
577 | self._changeset_cache = json.dumps(val) |
|
577 | self._changeset_cache = json.dumps(val) | |
578 | except Exception: |
|
578 | except Exception: | |
579 | log.error(traceback.format_exc()) |
|
579 | log.error(traceback.format_exc()) | |
580 |
|
580 | |||
581 | @classmethod |
|
581 | @classmethod | |
582 | def get_by_repo_name(cls, repo_name): |
|
582 | def get_by_repo_name(cls, repo_name): | |
583 | q = Session().query(cls).filter(cls.repo_name == repo_name) |
|
583 | q = Session().query(cls).filter(cls.repo_name == repo_name) | |
584 | q = q.options(joinedload(Repository.fork))\ |
|
584 | q = q.options(joinedload(Repository.fork))\ | |
585 | .options(joinedload(Repository.user))\ |
|
585 | .options(joinedload(Repository.user))\ | |
586 | .options(joinedload(Repository.group)) |
|
586 | .options(joinedload(Repository.group)) | |
587 | return q.scalar() |
|
587 | return q.scalar() | |
588 |
|
588 | |||
589 | #NOTE this is required for this migration to work |
|
589 | #NOTE this is required for this migration to work | |
590 | def update_commit_cache(self, cs_cache=None): |
|
590 | def update_commit_cache(self, cs_cache=None): | |
591 | """ |
|
591 | """ | |
592 | Update cache of last changeset for repository, keys should be:: |
|
592 | Update cache of last changeset for repository, keys should be:: | |
593 |
|
593 | |||
594 | short_id |
|
594 | short_id | |
595 | raw_id |
|
595 | raw_id | |
596 | revision |
|
596 | revision | |
597 | message |
|
597 | message | |
598 | date |
|
598 | date | |
599 | author |
|
599 | author | |
600 |
|
600 | |||
601 | :param cs_cache: |
|
601 | :param cs_cache: | |
602 | """ |
|
602 | """ | |
603 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
603 | from rhodecode.lib.vcs.backends.base import BaseChangeset | |
604 | if cs_cache is None: |
|
604 | if cs_cache is None: | |
605 | cs_cache = EmptyCommit() |
|
605 | cs_cache = EmptyCommit() | |
606 | # Note: always using the empty commit here in case we are |
|
606 | # Note: always using the empty commit here in case we are | |
607 | # upgrading towards version 3.0 and above. Reason is that in this |
|
607 | # upgrading towards version 3.0 and above. Reason is that in this | |
608 | # case the vcsclient connection is not available and things |
|
608 | # case the vcsclient connection is not available and things | |
609 | # would explode here. |
|
609 | # would explode here. | |
610 |
|
610 | |||
611 | if isinstance(cs_cache, BaseChangeset): |
|
611 | if isinstance(cs_cache, BaseChangeset): | |
612 | cs_cache = cs_cache.__json__() |
|
612 | cs_cache = cs_cache.__json__() | |
613 |
|
613 | |||
614 | if (cs_cache != self.changeset_cache or not self.changeset_cache): |
|
614 | if (cs_cache != self.changeset_cache or not self.changeset_cache): | |
615 | _default = datetime.datetime.fromtimestamp(0) |
|
615 | _default = datetime.datetime.fromtimestamp(0) | |
616 | last_change = cs_cache.get('date') or _default |
|
616 | last_change = cs_cache.get('date') or _default | |
617 | log.debug('updated repo %s with new commit cache %s', self.repo_name, cs_cache) |
|
617 | log.debug('updated repo %s with new commit cache %s', self.repo_name, cs_cache) | |
618 | self.updated_on = last_change |
|
618 | self.updated_on = last_change | |
619 | self.changeset_cache = cs_cache |
|
619 | self.changeset_cache = cs_cache | |
620 | Session().add(self) |
|
620 | Session().add(self) | |
621 | Session().commit() |
|
621 | Session().commit() | |
622 | else: |
|
622 | else: | |
623 | log.debug('Skipping repo:%s already with latest changes', self.repo_name) |
|
623 | log.debug('Skipping repo:%s already with latest changes', self.repo_name) | |
624 |
|
624 | |||
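
A sketch of how the commit cache above is meant to be used during the migration (repository name, hashes and dates are invented; the module-level imports above are assumed to be in scope):

    repo = Repository.get_by_repo_name('some/repo')
    repo.changeset_cache        # dict; falls back to EmptyCommit().__json__() when unset

    # with no argument the EmptyCommit payload is stored -- safe while upgrading,
    # when no vcsserver connection is available yet
    repo.update_commit_cache()

    # with a real payload the JSON blob and updated_on are refreshed and committed
    repo.update_commit_cache({
        'short_id': 'deadbeefcafe',
        'raw_id': 'deadbeefcafe' * 3 + 'feed',   # invented 40-char hash
        'revision': 42,
        'message': 'example commit message',
        'author': 'dev <dev@example.com>',
        'date': datetime.datetime(2019, 1, 1),
    })
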
625 | class RepoGroup(Base, BaseModel): |
|
625 | class RepoGroup(Base, BaseModel): | |
626 | __tablename__ = 'groups' |
|
626 | __tablename__ = 'groups' | |
627 | __table_args__ = ( |
|
627 | __table_args__ = ( | |
628 | UniqueConstraint('group_name', 'group_parent_id'), |
|
628 | UniqueConstraint('group_name', 'group_parent_id'), | |
629 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
629 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
630 | 'mysql_charset': 'utf8'}, |
|
630 | 'mysql_charset': 'utf8'}, | |
631 | ) |
|
631 | ) | |
632 | __mapper_args__ = {'order_by': 'group_name'} |
|
632 | __mapper_args__ = {'order_by': 'group_name'} | |
633 |
|
633 | |||
634 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
634 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
635 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
635 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) | |
636 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
636 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) | |
637 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
637 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) | |
638 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
638 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |
639 |
|
639 | |||
640 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') |
|
640 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') | |
641 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
641 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |
642 | parent_group = relationship('RepoGroup', remote_side=group_id) |
|
642 | parent_group = relationship('RepoGroup', remote_side=group_id) | |
643 |
|
643 | |||
644 | def __init__(self, group_name='', parent_group=None): |
|
644 | def __init__(self, group_name='', parent_group=None): | |
645 | self.group_name = group_name |
|
645 | self.group_name = group_name | |
646 | self.parent_group = parent_group |
|
646 | self.parent_group = parent_group | |
647 |
|
647 | |||
648 | def __unicode__(self): |
|
648 | def __unicode__(self): | |
649 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
649 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, | |
650 | self.group_name) |
|
650 | self.group_name) | |
651 |
|
651 | |||
652 | @classmethod |
|
652 | @classmethod | |
653 | def url_sep(cls): |
|
653 | def url_sep(cls): | |
654 | return URL_SEP |
|
654 | return URL_SEP | |
655 |
|
655 | |||
656 | @classmethod |
|
656 | @classmethod | |
657 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
657 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): | |
658 | if case_insensitive: |
|
658 | if case_insensitive: | |
659 | gr = cls.query()\ |
|
659 | gr = cls.query()\ | |
660 | .filter(cls.group_name.ilike(group_name)) |
|
660 | .filter(cls.group_name.ilike(group_name)) | |
661 | else: |
|
661 | else: | |
662 | gr = cls.query()\ |
|
662 | gr = cls.query()\ | |
663 | .filter(cls.group_name == group_name) |
|
663 | .filter(cls.group_name == group_name) | |
664 | if cache: |
|
664 | if cache: | |
665 | gr = gr.options(FromCache( |
|
665 | gr = gr.options(FromCache( | |
666 | "sql_cache_short", |
|
666 | "sql_cache_short", | |
667 | "get_group_%s" % _hash_key(group_name) |
|
667 | "get_group_%s" % _hash_key(group_name) | |
668 | ) |
|
668 | ) | |
669 | ) |
|
669 | ) | |
670 | return gr.scalar() |
|
670 | return gr.scalar() | |
671 |
|
671 | |||
672 |
|
672 | |||
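
An illustrative lookup of a nested repository group (the path is invented; group_name is assumed to hold the full path of nested groups, joined with URL_SEP as returned by url_sep() above):

    gr = RepoGroup.get_by_group_name(u'projects/backend', case_insensitive=True)
    if gr is not None and gr.parent_group is not None:
        parent_name = gr.parent_group.group_name    # e.g. u'projects'
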
673 | class Permission(Base, BaseModel): |
|
673 | class Permission(Base, BaseModel): | |
674 | __tablename__ = 'permissions' |
|
674 | __tablename__ = 'permissions' | |
675 | __table_args__ = ( |
|
675 | __table_args__ = ( | |
676 | Index('p_perm_name_idx', 'permission_name'), |
|
676 | Index('p_perm_name_idx', 'permission_name'), | |
677 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
677 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
678 | 'mysql_charset': 'utf8'}, |
|
678 | 'mysql_charset': 'utf8'}, | |
679 | ) |
|
679 | ) | |
680 | PERMS = [ |
|
680 | PERMS = [ | |
681 | ('repository.none', _('Repository no access')), |
|
681 | ('repository.none', _('Repository no access')), | |
682 | ('repository.read', _('Repository read access')), |
|
682 | ('repository.read', _('Repository read access')), | |
683 | ('repository.write', _('Repository write access')), |
|
683 | ('repository.write', _('Repository write access')), | |
684 | ('repository.admin', _('Repository admin access')), |
|
684 | ('repository.admin', _('Repository admin access')), | |
685 |
|
685 | |||
686 | ('group.none', _('Repository group no access')), |
|
686 | ('group.none', _('Repository group no access')), | |
687 | ('group.read', _('Repository group read access')), |
|
687 | ('group.read', _('Repository group read access')), | |
688 | ('group.write', _('Repository group write access')), |
|
688 | ('group.write', _('Repository group write access')), | |
689 | ('group.admin', _('Repository group admin access')), |
|
689 | ('group.admin', _('Repository group admin access')), | |
690 |
|
690 | |||
691 | ('hg.admin', _('RhodeCode Administrator')), |
|
691 | ('hg.admin', _('RhodeCode Administrator')), | |
692 | ('hg.create.none', _('Repository creation disabled')), |
|
692 | ('hg.create.none', _('Repository creation disabled')), | |
693 | ('hg.create.repository', _('Repository creation enabled')), |
|
693 | ('hg.create.repository', _('Repository creation enabled')), | |
694 | ('hg.fork.none', _('Repository forking disabled')), |
|
694 | ('hg.fork.none', _('Repository forking disabled')), | |
695 | ('hg.fork.repository', _('Repository forking enabled')), |
|
695 | ('hg.fork.repository', _('Repository forking enabled')), | |
696 | ('hg.register.none', _('Register disabled')), |
|
696 | ('hg.register.none', _('Register disabled')), | |
697 | ('hg.register.manual_activate', _('Register new user with RhodeCode ' |
|
697 | ('hg.register.manual_activate', _('Register new user with RhodeCode ' | |
698 | 'with manual activation')), |
|
698 | 'with manual activation')), | |
699 |
|
699 | |||
700 | ('hg.register.auto_activate', _('Register new user with RhodeCode ' |
|
700 | ('hg.register.auto_activate', _('Register new user with RhodeCode ' | |
701 | 'with auto activation')), |
|
701 | 'with auto activation')), | |
702 | ] |
|
702 | ] | |
703 |
|
703 | |||
704 | # defines which permissions are more important; the higher the weight, the more important |
|
704 | # defines which permissions are more important; the higher the weight, the more important | |
705 | PERM_WEIGHTS = { |
|
705 | PERM_WEIGHTS = { | |
706 | 'repository.none': 0, |
|
706 | 'repository.none': 0, | |
707 | 'repository.read': 1, |
|
707 | 'repository.read': 1, | |
708 | 'repository.write': 3, |
|
708 | 'repository.write': 3, | |
709 | 'repository.admin': 4, |
|
709 | 'repository.admin': 4, | |
710 |
|
710 | |||
711 | 'group.none': 0, |
|
711 | 'group.none': 0, | |
712 | 'group.read': 1, |
|
712 | 'group.read': 1, | |
713 | 'group.write': 3, |
|
713 | 'group.write': 3, | |
714 | 'group.admin': 4, |
|
714 | 'group.admin': 4, | |
715 |
|
715 | |||
716 | 'hg.fork.none': 0, |
|
716 | 'hg.fork.none': 0, | |
717 | 'hg.fork.repository': 1, |
|
717 | 'hg.fork.repository': 1, | |
718 | 'hg.create.none': 0, |
|
718 | 'hg.create.none': 0, | |
719 | 'hg.create.repository': 1 |
|
719 | 'hg.create.repository': 1 | |
720 | } |
|
720 | } | |
721 |
|
721 | |||
722 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
722 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
723 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
723 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) | |
724 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
724 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) | |
725 |
|
725 | |||
726 | def __unicode__(self): |
|
726 | def __unicode__(self): | |
727 | return u"<%s('%s:%s')>" % ( |
|
727 | return u"<%s('%s:%s')>" % ( | |
728 | self.__class__.__name__, self.permission_id, self.permission_name |
|
728 | self.__class__.__name__, self.permission_id, self.permission_name | |
729 | ) |
|
729 | ) | |
730 |
|
730 | |||
731 | @classmethod |
|
731 | @classmethod | |
732 | def get_by_key(cls, key): |
|
732 | def get_by_key(cls, key): | |
733 | return cls.query().filter(cls.permission_name == key).scalar() |
|
733 | return cls.query().filter(cls.permission_name == key).scalar() | |
734 |
|
734 | |||
735 |
|
735 | |||
class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    def __unicode__(self):
        return u'<user:%s => %s >' % (self.user, self.repository)


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    def __unicode__(self):
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)


class UserGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')


class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)

class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')


class CacheInvalidation(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False


class ChangesetComment(Base, BaseModel):
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns the users associated with this ChangesetComment, i.e. those
        who actually commented.

        :param revision: limit results to comments made on this revision
        :param pull_request_id: limit results to comments made on this pull request
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

class ChangesetStatus(Base, BaseModel):
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')


class PullRequest(Base, BaseModel):
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
    title = Column('title', Unicode(256), nullable=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    _revisions = Column('revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    org_ref = Column('org_ref', Unicode(256), nullable=False)
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    other_ref = Column('other_ref', Unicode(256), nullable=False)

    author = relationship('User', lazy='joined')
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")


class PullRequestReviewers(Base, BaseModel):
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    def __init__(self, user=None, pull_request=None):
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)

    user = relationship('User')
    pull_request = relationship('PullRequest')


class Notification(Base, BaseModel):
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(256))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")


class UserNotification(Base, BaseModel):
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)


class DbMigrateVersion(Base, BaseModel):
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)