Show More
The requested changes are too big and content was truncated. Show full diff
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
@@ -1,57 +1,57 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import sys |
|
23 | 23 | import platform |
|
24 | 24 | |
|
25 | 25 | VERSION = tuple(open(os.path.join( |
|
26 | 26 | os.path.dirname(__file__), 'VERSION')).read().split('.')) |
|
27 | 27 | |
|
28 | 28 | BACKENDS = { |
|
29 | 29 | 'hg': 'Mercurial repository', |
|
30 | 30 | 'git': 'Git repository', |
|
31 | 31 | 'svn': 'Subversion repository', |
|
32 | 32 | } |
|
33 | 33 | |
|
34 | 34 | CELERY_ENABLED = False |
|
35 | 35 | CELERY_EAGER = False |
|
36 | 36 | |
|
37 | 37 | # link to config for pyramid |
|
38 | 38 | CONFIG = {} |
|
39 | 39 | |
|
40 | 40 | # Populated with the settings dictionary from application init in |
|
41 | 41 | # rhodecode.conf.environment.load_pyramid_environment |
|
42 | 42 | PYRAMID_SETTINGS = {} |
|
43 | 43 | |
|
44 | 44 | # Linked module for extensions |
|
45 | 45 | EXTENSIONS = {} |
|
46 | 46 | |
|
47 | 47 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) |
|
48 |
__dbversion__ = 9 |
|
|
48 | __dbversion__ = 98 # defines current db version for migrations | |
|
49 | 49 | __platform__ = platform.system() |
|
50 | 50 | __license__ = 'AGPLv3, and Commercial License' |
|
51 | 51 | __author__ = 'RhodeCode GmbH' |
|
52 | 52 | __url__ = 'https://code.rhodecode.com' |
|
53 | 53 | |
|
54 | 54 | is_windows = __platform__ in ['Windows'] |
|
55 | 55 | is_unix = not is_windows |
|
56 | 56 | is_test = False |
|
57 | 57 | disable_error_handler = False |
@@ -1,746 +1,747 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | import logging |
|
23 | 23 | import collections |
|
24 | 24 | |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import BaseAppView |
|
28 | 28 | from rhodecode.lib import helpers as h |
|
29 | 29 | from rhodecode.lib.auth import ( |
|
30 | 30 | LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, |
|
31 | 31 | CSRFRequired) |
|
32 | 32 | from rhodecode.lib.index import searcher_from_config |
|
33 | 33 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
34 | 34 | from rhodecode.lib.ext_json import json |
|
35 | 35 | from rhodecode.model.db import ( |
|
36 | 36 | func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup) |
|
37 | 37 | from rhodecode.model.repo import RepoModel |
|
38 | 38 | from rhodecode.model.repo_group import RepoGroupModel |
|
39 | 39 | from rhodecode.model.scm import RepoGroupList, RepoList |
|
40 | 40 | from rhodecode.model.user import UserModel |
|
41 | 41 | from rhodecode.model.user_group import UserGroupModel |
|
42 | 42 | |
|
43 | 43 | log = logging.getLogger(__name__) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class HomeView(BaseAppView): |
|
47 | 47 | |
|
48 | 48 | def load_default_context(self): |
|
49 | 49 | c = self._get_local_tmpl_context() |
|
50 | 50 | c.user = c.auth_user.get_instance() |
|
51 | 51 | |
|
52 | 52 | return c |
|
53 | 53 | |
|
54 | 54 | @LoginRequired() |
|
55 | 55 | @view_config( |
|
56 | 56 | route_name='user_autocomplete_data', request_method='GET', |
|
57 | 57 | renderer='json_ext', xhr=True) |
|
58 | 58 | def user_autocomplete_data(self): |
|
59 | 59 | self.load_default_context() |
|
60 | 60 | query = self.request.GET.get('query') |
|
61 | 61 | active = str2bool(self.request.GET.get('active') or True) |
|
62 | 62 | include_groups = str2bool(self.request.GET.get('user_groups')) |
|
63 | 63 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
64 | 64 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) |
|
65 | 65 | |
|
66 | 66 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', |
|
67 | 67 | query, active, include_groups) |
|
68 | 68 | |
|
69 | 69 | _users = UserModel().get_users( |
|
70 | 70 | name_contains=query, only_active=active) |
|
71 | 71 | |
|
72 | 72 | def maybe_skip_default_user(usr): |
|
73 | 73 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
74 | 74 | return False |
|
75 | 75 | return True |
|
76 | 76 | _users = filter(maybe_skip_default_user, _users) |
|
77 | 77 | |
|
78 | 78 | if include_groups: |
|
79 | 79 | # extend with user groups |
|
80 | 80 | _user_groups = UserGroupModel().get_user_groups( |
|
81 | 81 | name_contains=query, only_active=active, |
|
82 | 82 | expand_groups=expand_groups) |
|
83 | 83 | _users = _users + _user_groups |
|
84 | 84 | |
|
85 | 85 | return {'suggestions': _users} |
|
86 | 86 | |
|
87 | 87 | @LoginRequired() |
|
88 | 88 | @NotAnonymous() |
|
89 | 89 | @view_config( |
|
90 | 90 | route_name='user_group_autocomplete_data', request_method='GET', |
|
91 | 91 | renderer='json_ext', xhr=True) |
|
92 | 92 | def user_group_autocomplete_data(self): |
|
93 | 93 | self.load_default_context() |
|
94 | 94 | query = self.request.GET.get('query') |
|
95 | 95 | active = str2bool(self.request.GET.get('active') or True) |
|
96 | 96 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
97 | 97 | |
|
98 | 98 | log.debug('generating user group list, query:%s, active:%s', |
|
99 | 99 | query, active) |
|
100 | 100 | |
|
101 | 101 | _user_groups = UserGroupModel().get_user_groups( |
|
102 | 102 | name_contains=query, only_active=active, |
|
103 | 103 | expand_groups=expand_groups) |
|
104 | 104 | _user_groups = _user_groups |
|
105 | 105 | |
|
106 | 106 | return {'suggestions': _user_groups} |
|
107 | 107 | |
|
108 | 108 | def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20): |
|
109 | 109 | org_query = name_contains |
|
110 | 110 | allowed_ids = self._rhodecode_user.repo_acl_ids( |
|
111 | 111 | ['repository.read', 'repository.write', 'repository.admin'], |
|
112 | 112 | cache=False, name_filter=name_contains) or [-1] |
|
113 | 113 | |
|
114 | 114 | query = Repository.query()\ |
|
115 | 115 | .filter(Repository.archived.isnot(true()))\ |
|
116 | 116 | .filter(or_( |
|
117 | 117 | # generate multiple IN to fix limitation problems |
|
118 | 118 | *in_filter_generator(Repository.repo_id, allowed_ids) |
|
119 | 119 | )) |
|
120 | 120 | |
|
121 | 121 | query = query.order_by(case( |
|
122 | 122 | [ |
|
123 | 123 | (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'), |
|
124 | 124 | ], |
|
125 | 125 | )) |
|
126 | 126 | query = query.order_by(func.length(Repository.repo_name)) |
|
127 | 127 | query = query.order_by(Repository.repo_name) |
|
128 | 128 | |
|
129 | 129 | if repo_type: |
|
130 | 130 | query = query.filter(Repository.repo_type == repo_type) |
|
131 | 131 | |
|
132 | 132 | if name_contains: |
|
133 | 133 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
134 | 134 | query = query.filter( |
|
135 | 135 | Repository.repo_name.ilike(ilike_expression)) |
|
136 | 136 | query = query.limit(limit) |
|
137 | 137 | |
|
138 | 138 | acl_iter = query |
|
139 | 139 | |
|
140 | 140 | return [ |
|
141 | 141 | { |
|
142 | 142 | 'id': obj.repo_name, |
|
143 | 143 | 'value': org_query, |
|
144 | 144 | 'value_display': obj.repo_name, |
|
145 | 145 | 'text': obj.repo_name, |
|
146 | 146 | 'type': 'repo', |
|
147 | 147 | 'repo_id': obj.repo_id, |
|
148 | 148 | 'repo_type': obj.repo_type, |
|
149 | 149 | 'private': obj.private, |
|
150 | 150 | 'url': h.route_path('repo_summary', repo_name=obj.repo_name) |
|
151 | 151 | } |
|
152 | 152 | for obj in acl_iter] |
|
153 | 153 | |
|
154 | 154 | def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20): |
|
155 | 155 | org_query = name_contains |
|
156 | 156 | allowed_ids = self._rhodecode_user.repo_group_acl_ids( |
|
157 | 157 | ['group.read', 'group.write', 'group.admin'], |
|
158 | 158 | cache=False, name_filter=name_contains) or [-1] |
|
159 | 159 | |
|
160 | 160 | query = RepoGroup.query()\ |
|
161 | 161 | .filter(or_( |
|
162 | 162 | # generate multiple IN to fix limitation problems |
|
163 | 163 | *in_filter_generator(RepoGroup.group_id, allowed_ids) |
|
164 | 164 | )) |
|
165 | 165 | |
|
166 | 166 | query = query.order_by(case( |
|
167 | 167 | [ |
|
168 | 168 | (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'), |
|
169 | 169 | ], |
|
170 | 170 | )) |
|
171 | 171 | query = query.order_by(func.length(RepoGroup.group_name)) |
|
172 | 172 | query = query.order_by(RepoGroup.group_name) |
|
173 | 173 | |
|
174 | 174 | if name_contains: |
|
175 | 175 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
176 | 176 | query = query.filter( |
|
177 | 177 | RepoGroup.group_name.ilike(ilike_expression)) |
|
178 | 178 | query = query.limit(limit) |
|
179 | 179 | |
|
180 | 180 | acl_iter = query |
|
181 | 181 | |
|
182 | 182 | return [ |
|
183 | 183 | { |
|
184 | 184 | 'id': obj.group_name, |
|
185 | 185 | 'value': org_query, |
|
186 | 186 | 'value_display': obj.group_name, |
|
187 | 187 | 'text': obj.group_name, |
|
188 | 188 | 'type': 'repo_group', |
|
189 | 189 | 'repo_group_id': obj.group_id, |
|
190 | 190 | 'url': h.route_path( |
|
191 | 191 | 'repo_group_home', repo_group_name=obj.group_name) |
|
192 | 192 | } |
|
193 | 193 | for obj in acl_iter] |
|
194 | 194 | |
|
195 | 195 | def _get_user_list(self, name_contains=None, limit=20): |
|
196 | 196 | org_query = name_contains |
|
197 | 197 | if not name_contains: |
|
198 | 198 | return [], False |
|
199 | 199 | |
|
200 | 200 | # TODO(marcink): should all logged in users be allowed to search others? |
|
201 | 201 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
202 | 202 | if not allowed_user_search: |
|
203 | 203 | return [], False |
|
204 | 204 | |
|
205 | 205 | name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains) |
|
206 | 206 | if len(name_contains) != 1: |
|
207 | 207 | return [], False |
|
208 | 208 | |
|
209 | 209 | name_contains = name_contains[0] |
|
210 | 210 | |
|
211 | 211 | query = User.query()\ |
|
212 | 212 | .order_by(func.length(User.username))\ |
|
213 | 213 | .order_by(User.username) \ |
|
214 | 214 | .filter(User.username != User.DEFAULT_USER) |
|
215 | 215 | |
|
216 | 216 | if name_contains: |
|
217 | 217 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
218 | 218 | query = query.filter( |
|
219 | 219 | User.username.ilike(ilike_expression)) |
|
220 | 220 | query = query.limit(limit) |
|
221 | 221 | |
|
222 | 222 | acl_iter = query |
|
223 | 223 | |
|
224 | 224 | return [ |
|
225 | 225 | { |
|
226 | 226 | 'id': obj.user_id, |
|
227 | 227 | 'value': org_query, |
|
228 | 228 | 'value_display': 'user: `{}`'.format(obj.username), |
|
229 | 229 | 'type': 'user', |
|
230 | 230 | 'icon_link': h.gravatar_url(obj.email, 30), |
|
231 | 231 | 'url': h.route_path( |
|
232 | 232 | 'user_profile', username=obj.username) |
|
233 | 233 | } |
|
234 | 234 | for obj in acl_iter], True |
|
235 | 235 | |
|
236 | 236 | def _get_user_groups_list(self, name_contains=None, limit=20): |
|
237 | 237 | org_query = name_contains |
|
238 | 238 | if not name_contains: |
|
239 | 239 | return [], False |
|
240 | 240 | |
|
241 | 241 | # TODO(marcink): should all logged in users be allowed to search others? |
|
242 | 242 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
243 | 243 | if not allowed_user_search: |
|
244 | 244 | return [], False |
|
245 | 245 | |
|
246 | 246 | name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains) |
|
247 | 247 | if len(name_contains) != 1: |
|
248 | 248 | return [], False |
|
249 | 249 | |
|
250 | 250 | name_contains = name_contains[0] |
|
251 | 251 | |
|
252 | 252 | query = UserGroup.query()\ |
|
253 | 253 | .order_by(func.length(UserGroup.users_group_name))\ |
|
254 | 254 | .order_by(UserGroup.users_group_name) |
|
255 | 255 | |
|
256 | 256 | if name_contains: |
|
257 | 257 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
258 | 258 | query = query.filter( |
|
259 | 259 | UserGroup.users_group_name.ilike(ilike_expression)) |
|
260 | 260 | query = query.limit(limit) |
|
261 | 261 | |
|
262 | 262 | acl_iter = query |
|
263 | 263 | |
|
264 | 264 | return [ |
|
265 | 265 | { |
|
266 | 266 | 'id': obj.users_group_id, |
|
267 | 267 | 'value': org_query, |
|
268 | 268 | 'value_display': 'user_group: `{}`'.format(obj.users_group_name), |
|
269 | 269 | 'type': 'user_group', |
|
270 | 270 | 'url': h.route_path( |
|
271 | 271 | 'user_group_profile', user_group_name=obj.users_group_name) |
|
272 | 272 | } |
|
273 | 273 | for obj in acl_iter], True |
|
274 | 274 | |
|
275 | 275 | def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None): |
|
276 | 276 | repo_name = repo_group_name = None |
|
277 | 277 | if repo: |
|
278 | 278 | repo_name = repo.repo_name |
|
279 | 279 | if repo_group: |
|
280 | 280 | repo_group_name = repo_group.group_name |
|
281 | 281 | |
|
282 | 282 | org_query = query |
|
283 | 283 | if not query or len(query) < 3 or not searcher: |
|
284 | 284 | return [], False |
|
285 | 285 | |
|
286 | 286 | commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query) |
|
287 | 287 | |
|
288 | 288 | if len(commit_hashes) != 1: |
|
289 | 289 | return [], False |
|
290 | 290 | |
|
291 | 291 | commit_hash = commit_hashes[0] |
|
292 | 292 | |
|
293 | 293 | result = searcher.search( |
|
294 | 294 | 'commit_id:{}*'.format(commit_hash), 'commit', auth_user, |
|
295 | 295 | repo_name, repo_group_name, raise_on_exc=False) |
|
296 | 296 | |
|
297 | 297 | commits = [] |
|
298 | 298 | for entry in result['results']: |
|
299 | 299 | repo_data = { |
|
300 | 300 | 'repository_id': entry.get('repository_id'), |
|
301 | 301 | 'repository_type': entry.get('repo_type'), |
|
302 | 302 | 'repository_name': entry.get('repository'), |
|
303 | 303 | } |
|
304 | 304 | |
|
305 | 305 | commit_entry = { |
|
306 | 306 | 'id': entry['commit_id'], |
|
307 | 307 | 'value': org_query, |
|
308 | 308 | 'value_display': '`{}` commit: {}'.format( |
|
309 | 309 | entry['repository'], entry['commit_id']), |
|
310 | 310 | 'type': 'commit', |
|
311 | 311 | 'repo': entry['repository'], |
|
312 | 312 | 'repo_data': repo_data, |
|
313 | 313 | |
|
314 | 314 | 'url': h.route_path( |
|
315 | 315 | 'repo_commit', |
|
316 | 316 | repo_name=entry['repository'], commit_id=entry['commit_id']) |
|
317 | 317 | } |
|
318 | 318 | |
|
319 | 319 | commits.append(commit_entry) |
|
320 | 320 | return commits, True |
|
321 | 321 | |
|
322 | 322 | def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None): |
|
323 | 323 | repo_name = repo_group_name = None |
|
324 | 324 | if repo: |
|
325 | 325 | repo_name = repo.repo_name |
|
326 | 326 | if repo_group: |
|
327 | 327 | repo_group_name = repo_group.group_name |
|
328 | 328 | |
|
329 | 329 | org_query = query |
|
330 | 330 | if not query or len(query) < 3 or not searcher: |
|
331 | 331 | return [], False |
|
332 | 332 | |
|
333 | 333 | paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query) |
|
334 | 334 | if len(paths_re) != 1: |
|
335 | 335 | return [], False |
|
336 | 336 | |
|
337 | 337 | file_path = paths_re[0] |
|
338 | 338 | |
|
339 | 339 | search_path = searcher.escape_specials(file_path) |
|
340 | 340 | result = searcher.search( |
|
341 | 341 | 'file.raw:*{}*'.format(search_path), 'path', auth_user, |
|
342 | 342 | repo_name, repo_group_name, raise_on_exc=False) |
|
343 | 343 | |
|
344 | 344 | files = [] |
|
345 | 345 | for entry in result['results']: |
|
346 | 346 | repo_data = { |
|
347 | 347 | 'repository_id': entry.get('repository_id'), |
|
348 | 348 | 'repository_type': entry.get('repo_type'), |
|
349 | 349 | 'repository_name': entry.get('repository'), |
|
350 | 350 | } |
|
351 | 351 | |
|
352 | 352 | file_entry = { |
|
353 | 353 | 'id': entry['commit_id'], |
|
354 | 354 | 'value': org_query, |
|
355 | 355 | 'value_display': '`{}` file: {}'.format( |
|
356 | 356 | entry['repository'], entry['file']), |
|
357 | 357 | 'type': 'file', |
|
358 | 358 | 'repo': entry['repository'], |
|
359 | 359 | 'repo_data': repo_data, |
|
360 | 360 | |
|
361 | 361 | 'url': h.route_path( |
|
362 | 362 | 'repo_files', |
|
363 | 363 | repo_name=entry['repository'], commit_id=entry['commit_id'], |
|
364 | 364 | f_path=entry['file']) |
|
365 | 365 | } |
|
366 | 366 | |
|
367 | 367 | files.append(file_entry) |
|
368 | 368 | return files, True |
|
369 | 369 | |
|
370 | 370 | @LoginRequired() |
|
371 | 371 | @view_config( |
|
372 | 372 | route_name='repo_list_data', request_method='GET', |
|
373 | 373 | renderer='json_ext', xhr=True) |
|
374 | 374 | def repo_list_data(self): |
|
375 | 375 | _ = self.request.translate |
|
376 | 376 | self.load_default_context() |
|
377 | 377 | |
|
378 | 378 | query = self.request.GET.get('query') |
|
379 | 379 | repo_type = self.request.GET.get('repo_type') |
|
380 | 380 | log.debug('generating repo list, query:%s, repo_type:%s', |
|
381 | 381 | query, repo_type) |
|
382 | 382 | |
|
383 | 383 | res = [] |
|
384 | 384 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
385 | 385 | if repos: |
|
386 | 386 | res.append({ |
|
387 | 387 | 'text': _('Repositories'), |
|
388 | 388 | 'children': repos |
|
389 | 389 | }) |
|
390 | 390 | |
|
391 | 391 | data = { |
|
392 | 392 | 'more': False, |
|
393 | 393 | 'results': res |
|
394 | 394 | } |
|
395 | 395 | return data |
|
396 | 396 | |
|
397 | 397 | @LoginRequired() |
|
398 | 398 | @view_config( |
|
399 | 399 | route_name='repo_group_list_data', request_method='GET', |
|
400 | 400 | renderer='json_ext', xhr=True) |
|
401 | 401 | def repo_group_list_data(self): |
|
402 | 402 | _ = self.request.translate |
|
403 | 403 | self.load_default_context() |
|
404 | 404 | |
|
405 | 405 | query = self.request.GET.get('query') |
|
406 | 406 | |
|
407 | 407 | log.debug('generating repo group list, query:%s', |
|
408 | 408 | query) |
|
409 | 409 | |
|
410 | 410 | res = [] |
|
411 | 411 | repo_groups = self._get_repo_group_list(query) |
|
412 | 412 | if repo_groups: |
|
413 | 413 | res.append({ |
|
414 | 414 | 'text': _('Repository Groups'), |
|
415 | 415 | 'children': repo_groups |
|
416 | 416 | }) |
|
417 | 417 | |
|
418 | 418 | data = { |
|
419 | 419 | 'more': False, |
|
420 | 420 | 'results': res |
|
421 | 421 | } |
|
422 | 422 | return data |
|
423 | 423 | |
|
424 | 424 | def _get_default_search_queries(self, search_context, searcher, query): |
|
425 | 425 | if not searcher: |
|
426 | 426 | return [] |
|
427 | 427 | |
|
428 | 428 | is_es_6 = searcher.is_es_6 |
|
429 | 429 | |
|
430 | 430 | queries = [] |
|
431 | 431 | repo_group_name, repo_name, repo_context = None, None, None |
|
432 | 432 | |
|
433 | 433 | # repo group context |
|
434 | 434 | if search_context.get('search_context[repo_group_name]'): |
|
435 | 435 | repo_group_name = search_context.get('search_context[repo_group_name]') |
|
436 | 436 | if search_context.get('search_context[repo_name]'): |
|
437 | 437 | repo_name = search_context.get('search_context[repo_name]') |
|
438 | 438 | repo_context = search_context.get('search_context[repo_view_type]') |
|
439 | 439 | |
|
440 | 440 | if is_es_6 and repo_name: |
|
441 | 441 | # files |
|
442 | 442 | def query_modifier(): |
|
443 | 443 | qry = query |
|
444 | 444 | return {'q': qry, 'type': 'content'} |
|
445 | 445 | label = u'File search for `{}` in this repository.'.format(query) |
|
446 | 446 | file_qry = { |
|
447 | 447 | 'id': -10, |
|
448 | 448 | 'value': query, |
|
449 | 449 | 'value_display': label, |
|
450 | 450 | 'type': 'search', |
|
451 | 451 | 'url': h.route_path('search_repo', |
|
452 | 452 | repo_name=repo_name, |
|
453 | 453 | _query=query_modifier()) |
|
454 | 454 | } |
|
455 | 455 | |
|
456 | 456 | # commits |
|
457 | 457 | def query_modifier(): |
|
458 | 458 | qry = query |
|
459 | 459 | return {'q': qry, 'type': 'commit'} |
|
460 | 460 | |
|
461 | 461 | label = u'Commit search for `{}` in this repository.'.format(query) |
|
462 | 462 | commit_qry = { |
|
463 | 463 | 'id': -20, |
|
464 | 464 | 'value': query, |
|
465 | 465 | 'value_display': label, |
|
466 | 466 | 'type': 'search', |
|
467 | 467 | 'url': h.route_path('search_repo', |
|
468 | 468 | repo_name=repo_name, |
|
469 | 469 | _query=query_modifier()) |
|
470 | 470 | } |
|
471 | 471 | |
|
472 | 472 | if repo_context in ['commit', 'changelog']: |
|
473 | 473 | queries.extend([commit_qry, file_qry]) |
|
474 | 474 | elif repo_context in ['files', 'summary']: |
|
475 | 475 | queries.extend([file_qry, commit_qry]) |
|
476 | 476 | else: |
|
477 | 477 | queries.extend([commit_qry, file_qry]) |
|
478 | 478 | |
|
479 | 479 | elif is_es_6 and repo_group_name: |
|
480 | 480 | # files |
|
481 | 481 | def query_modifier(): |
|
482 | 482 | qry = query |
|
483 | 483 | return {'q': qry, 'type': 'content'} |
|
484 | 484 | |
|
485 | 485 | label = u'File search for `{}` in this repository group'.format(query) |
|
486 | 486 | file_qry = { |
|
487 | 487 | 'id': -30, |
|
488 | 488 | 'value': query, |
|
489 | 489 | 'value_display': label, |
|
490 | 490 | 'type': 'search', |
|
491 | 491 | 'url': h.route_path('search_repo_group', |
|
492 | 492 | repo_group_name=repo_group_name, |
|
493 | 493 | _query=query_modifier()) |
|
494 | 494 | } |
|
495 | 495 | |
|
496 | 496 | # commits |
|
497 | 497 | def query_modifier(): |
|
498 | 498 | qry = query |
|
499 | 499 | return {'q': qry, 'type': 'commit'} |
|
500 | 500 | |
|
501 | 501 | label = u'Commit search for `{}` in this repository group'.format(query) |
|
502 | 502 | commit_qry = { |
|
503 | 503 | 'id': -40, |
|
504 | 504 | 'value': query, |
|
505 | 505 | 'value_display': label, |
|
506 | 506 | 'type': 'search', |
|
507 | 507 | 'url': h.route_path('search_repo_group', |
|
508 | 508 | repo_group_name=repo_group_name, |
|
509 | 509 | _query=query_modifier()) |
|
510 | 510 | } |
|
511 | 511 | |
|
512 | 512 | if repo_context in ['commit', 'changelog']: |
|
513 | 513 | queries.extend([commit_qry, file_qry]) |
|
514 | 514 | elif repo_context in ['files', 'summary']: |
|
515 | 515 | queries.extend([file_qry, commit_qry]) |
|
516 | 516 | else: |
|
517 | 517 | queries.extend([commit_qry, file_qry]) |
|
518 | 518 | |
|
519 | 519 | # Global, not scoped |
|
520 | 520 | if not queries: |
|
521 | 521 | queries.append( |
|
522 | 522 | { |
|
523 | 523 | 'id': -1, |
|
524 | 524 | 'value': query, |
|
525 | 525 | 'value_display': u'File search for: `{}`'.format(query), |
|
526 | 526 | 'type': 'search', |
|
527 | 527 | 'url': h.route_path('search', |
|
528 | 528 | _query={'q': query, 'type': 'content'}) |
|
529 | 529 | }) |
|
530 | 530 | queries.append( |
|
531 | 531 | { |
|
532 | 532 | 'id': -2, |
|
533 | 533 | 'value': query, |
|
534 | 534 | 'value_display': u'Commit search for: `{}`'.format(query), |
|
535 | 535 | 'type': 'search', |
|
536 | 536 | 'url': h.route_path('search', |
|
537 | 537 | _query={'q': query, 'type': 'commit'}) |
|
538 | 538 | }) |
|
539 | 539 | |
|
540 | 540 | return queries |
|
541 | 541 | |
|
542 | 542 | @LoginRequired() |
|
543 | 543 | @view_config( |
|
544 | 544 | route_name='goto_switcher_data', request_method='GET', |
|
545 | 545 | renderer='json_ext', xhr=True) |
|
546 | 546 | def goto_switcher_data(self): |
|
547 | 547 | c = self.load_default_context() |
|
548 | 548 | |
|
549 | 549 | _ = self.request.translate |
|
550 | 550 | |
|
551 | 551 | query = self.request.GET.get('query') |
|
552 | 552 | log.debug('generating main filter data, query %s', query) |
|
553 | 553 | |
|
554 | 554 | res = [] |
|
555 | 555 | if not query: |
|
556 | 556 | return {'suggestions': res} |
|
557 | 557 | |
|
558 | 558 | def no_match(name): |
|
559 | 559 | return { |
|
560 | 560 | 'id': -1, |
|
561 | 561 | 'value': "", |
|
562 | 562 | 'value_display': name, |
|
563 | 563 | 'type': 'text', |
|
564 | 564 | 'url': "" |
|
565 | 565 | } |
|
566 | 566 | searcher = searcher_from_config(self.request.registry.settings) |
|
567 | 567 | has_specialized_search = False |
|
568 | 568 | |
|
569 | 569 | # set repo context |
|
570 | 570 | repo = None |
|
571 | 571 | repo_id = safe_int(self.request.GET.get('search_context[repo_id]')) |
|
572 | 572 | if repo_id: |
|
573 | 573 | repo = Repository.get(repo_id) |
|
574 | 574 | |
|
575 | 575 | # set group context |
|
576 | 576 | repo_group = None |
|
577 | 577 | repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]')) |
|
578 | 578 | if repo_group_id: |
|
579 | 579 | repo_group = RepoGroup.get(repo_group_id) |
|
580 | 580 | prefix_match = False |
|
581 | 581 | |
|
582 | 582 | # user: type search |
|
583 | 583 | if not prefix_match: |
|
584 | 584 | users, prefix_match = self._get_user_list(query) |
|
585 | 585 | if users: |
|
586 | 586 | has_specialized_search = True |
|
587 | 587 | for serialized_user in users: |
|
588 | 588 | res.append(serialized_user) |
|
589 | 589 | elif prefix_match: |
|
590 | 590 | has_specialized_search = True |
|
591 | 591 | res.append(no_match('No matching users found')) |
|
592 | 592 | |
|
593 | 593 | # user_group: type search |
|
594 | 594 | if not prefix_match: |
|
595 | 595 | user_groups, prefix_match = self._get_user_groups_list(query) |
|
596 | 596 | if user_groups: |
|
597 | 597 | has_specialized_search = True |
|
598 | 598 | for serialized_user_group in user_groups: |
|
599 | 599 | res.append(serialized_user_group) |
|
600 | 600 | elif prefix_match: |
|
601 | 601 | has_specialized_search = True |
|
602 | 602 | res.append(no_match('No matching user groups found')) |
|
603 | 603 | |
|
604 | 604 | # FTS commit: type search |
|
605 | 605 | if not prefix_match: |
|
606 | 606 | commits, prefix_match = self._get_hash_commit_list( |
|
607 | 607 | c.auth_user, searcher, query, repo, repo_group) |
|
608 | 608 | if commits: |
|
609 | 609 | has_specialized_search = True |
|
610 | 610 | unique_repos = collections.OrderedDict() |
|
611 | 611 | for commit in commits: |
|
612 | 612 | repo_name = commit['repo'] |
|
613 | 613 | unique_repos.setdefault(repo_name, []).append(commit) |
|
614 | 614 | |
|
615 | 615 | for _repo, commits in unique_repos.items(): |
|
616 | 616 | for commit in commits: |
|
617 | 617 | res.append(commit) |
|
618 | 618 | elif prefix_match: |
|
619 | 619 | has_specialized_search = True |
|
620 | 620 | res.append(no_match('No matching commits found')) |
|
621 | 621 | |
|
622 | 622 | # FTS file: type search |
|
623 | 623 | if not prefix_match: |
|
624 | 624 | paths, prefix_match = self._get_path_list( |
|
625 | 625 | c.auth_user, searcher, query, repo, repo_group) |
|
626 | 626 | if paths: |
|
627 | 627 | has_specialized_search = True |
|
628 | 628 | unique_repos = collections.OrderedDict() |
|
629 | 629 | for path in paths: |
|
630 | 630 | repo_name = path['repo'] |
|
631 | 631 | unique_repos.setdefault(repo_name, []).append(path) |
|
632 | 632 | |
|
633 | 633 | for repo, paths in unique_repos.items(): |
|
634 | 634 | for path in paths: |
|
635 | 635 | res.append(path) |
|
636 | 636 | elif prefix_match: |
|
637 | 637 | has_specialized_search = True |
|
638 | 638 | res.append(no_match('No matching files found')) |
|
639 | 639 | |
|
640 | 640 | # main suggestions |
|
641 | 641 | if not has_specialized_search: |
|
642 | 642 | repo_group_name = '' |
|
643 | 643 | if repo_group: |
|
644 | 644 | repo_group_name = repo_group.group_name |
|
645 | 645 | |
|
646 | 646 | for _q in self._get_default_search_queries(self.request.GET, searcher, query): |
|
647 | 647 | res.append(_q) |
|
648 | 648 | |
|
649 | 649 | repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name) |
|
650 | 650 | for serialized_repo_group in repo_groups: |
|
651 | 651 | res.append(serialized_repo_group) |
|
652 | 652 | |
|
653 | 653 | repos = self._get_repo_list(query, repo_group_name=repo_group_name) |
|
654 | 654 | for serialized_repo in repos: |
|
655 | 655 | res.append(serialized_repo) |
|
656 | 656 | |
|
657 | 657 | if not repos and not repo_groups: |
|
658 | 658 | res.append(no_match('No matches found')) |
|
659 | 659 | |
|
660 | 660 | return {'suggestions': res} |
|
661 | 661 | |
|
662 | 662 | def _get_groups_and_repos(self, repo_group_id=None): |
|
663 | 663 | # repo groups groups |
|
664 | 664 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
665 | 665 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
666 | 666 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
667 | 667 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
668 | 668 | repo_group_list=repo_group_list_acl, admin=False) |
|
669 | 669 | |
|
670 | 670 | # repositories |
|
671 | 671 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
672 | 672 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
673 | 673 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
674 | 674 | repo_data = RepoModel().get_repos_as_dict( |
|
675 | 675 | repo_list=repo_list_acl, admin=False) |
|
676 | 676 | |
|
677 | 677 | return repo_data, repo_group_data |
|
678 | 678 | |
|
679 | 679 | @LoginRequired() |
|
680 | 680 | @view_config( |
|
681 | 681 | route_name='home', request_method='GET', |
|
682 | 682 | renderer='rhodecode:templates/index.mako') |
|
683 | 683 | def main_page(self): |
|
684 | 684 | c = self.load_default_context() |
|
685 | 685 | c.repo_group = None |
|
686 | 686 | |
|
687 | 687 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
688 | 688 | # json used to render the grids |
|
689 | 689 | c.repos_data = json.dumps(repo_data) |
|
690 | 690 | c.repo_groups_data = json.dumps(repo_group_data) |
|
691 | 691 | |
|
692 | 692 | return self._get_template_context(c) |
|
693 | 693 | |
|
694 | 694 | @LoginRequired() |
|
695 | 695 | @HasRepoGroupPermissionAnyDecorator( |
|
696 | 696 | 'group.read', 'group.write', 'group.admin') |
|
697 | 697 | @view_config( |
|
698 | 698 | route_name='repo_group_home', request_method='GET', |
|
699 | 699 | renderer='rhodecode:templates/index_repo_group.mako') |
|
700 | 700 | @view_config( |
|
701 | 701 | route_name='repo_group_home_slash', request_method='GET', |
|
702 | 702 | renderer='rhodecode:templates/index_repo_group.mako') |
|
703 | 703 | def repo_group_main_page(self): |
|
704 | 704 | c = self.load_default_context() |
|
705 | 705 | c.repo_group = self.request.db_repo_group |
|
706 | repo_data, repo_group_data = self._get_groups_and_repos( | |
|
707 | c.repo_group.group_id) | |
|
706 | repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id) | |
|
707 | ||
|
708 | c.repo_group.update_commit_cache() | |
|
708 | 709 | |
|
709 | 710 | # json used to render the grids |
|
710 | 711 | c.repos_data = json.dumps(repo_data) |
|
711 | 712 | c.repo_groups_data = json.dumps(repo_group_data) |
|
712 | 713 | |
|
713 | 714 | return self._get_template_context(c) |
|
714 | 715 | |
|
715 | 716 | @LoginRequired() |
|
716 | 717 | @CSRFRequired() |
|
717 | 718 | @view_config( |
|
718 | 719 | route_name='markup_preview', request_method='POST', |
|
719 | 720 | renderer='string', xhr=True) |
|
720 | 721 | def markup_preview(self): |
|
721 | 722 | # Technically a CSRF token is not needed as no state changes with this |
|
722 | 723 | # call. However, as this is a POST is better to have it, so automated |
|
723 | 724 | # tools don't flag it as potential CSRF. |
|
724 | 725 | # Post is required because the payload could be bigger than the maximum |
|
725 | 726 | # allowed by GET. |
|
726 | 727 | |
|
727 | 728 | text = self.request.POST.get('text') |
|
728 | 729 | renderer = self.request.POST.get('renderer') or 'rst' |
|
729 | 730 | if text: |
|
730 | 731 | return h.render(text, renderer=renderer, mentions=True) |
|
731 | 732 | return '' |
|
732 | 733 | |
|
733 | 734 | @LoginRequired() |
|
734 | 735 | @CSRFRequired() |
|
735 | 736 | @view_config( |
|
736 | 737 | route_name='store_user_session_value', request_method='POST', |
|
737 | 738 | renderer='string', xhr=True) |
|
738 | 739 | def store_user_session_attr(self): |
|
739 | 740 | key = self.request.POST.get('key') |
|
740 | 741 | val = self.request.POST.get('val') |
|
741 | 742 | |
|
742 | 743 | existing_value = self.request.session.get(key) |
|
743 | 744 | if existing_value != val: |
|
744 | 745 | self.request.session[key] = val |
|
745 | 746 | |
|
746 | 747 | return 'stored:{}:{}'.format(key, val) |
@@ -1,645 +1,642 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Database creation, and setup module for RhodeCode Enterprise. Used for creation |
|
23 | 23 | of database as well as for migration operations |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import sys |
|
28 | 28 | import time |
|
29 | 29 | import uuid |
|
30 | 30 | import logging |
|
31 | 31 | import getpass |
|
32 | 32 | from os.path import dirname as dn, join as jn |
|
33 | 33 | |
|
34 | 34 | from sqlalchemy.engine import create_engine |
|
35 | 35 | |
|
36 | 36 | from rhodecode import __dbversion__ |
|
37 | 37 | from rhodecode.model import init_model |
|
38 | 38 | from rhodecode.model.user import UserModel |
|
39 | 39 | from rhodecode.model.db import ( |
|
40 | 40 | User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm, |
|
41 | 41 | DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository) |
|
42 | 42 | from rhodecode.model.meta import Session, Base |
|
43 | 43 | from rhodecode.model.permission import PermissionModel |
|
44 | 44 | from rhodecode.model.repo import RepoModel |
|
45 | 45 | from rhodecode.model.repo_group import RepoGroupModel |
|
46 | 46 | from rhodecode.model.settings import SettingsModel |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | def notify(msg): |
|
53 | 53 | """ |
|
54 | 54 | Notification for migrations messages |
|
55 | 55 | """ |
|
56 | 56 | ml = len(msg) + (4 * 2) |
|
57 | 57 | print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper()) |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | class DbManage(object): |
|
61 | 61 | |
|
62 | 62 | def __init__(self, log_sql, dbconf, root, tests=False, |
|
63 | 63 | SESSION=None, cli_args=None): |
|
64 | 64 | self.dbname = dbconf.split('/')[-1] |
|
65 | 65 | self.tests = tests |
|
66 | 66 | self.root = root |
|
67 | 67 | self.dburi = dbconf |
|
68 | 68 | self.log_sql = log_sql |
|
69 | 69 | self.db_exists = False |
|
70 | 70 | self.cli_args = cli_args or {} |
|
71 | 71 | self.init_db(SESSION=SESSION) |
|
72 | 72 | self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask')) |
|
73 | 73 | |
|
74 | 74 | def get_ask_ok_func(self, param): |
|
75 | 75 | if param not in [None]: |
|
76 | 76 | # return a function lambda that has a default set to param |
|
77 | 77 | return lambda *args, **kwargs: param |
|
78 | 78 | else: |
|
79 | 79 | from rhodecode.lib.utils import ask_ok |
|
80 | 80 | return ask_ok |
|
81 | 81 | |
|
82 | 82 | def init_db(self, SESSION=None): |
|
83 | 83 | if SESSION: |
|
84 | 84 | self.sa = SESSION |
|
85 | 85 | else: |
|
86 | 86 | # init new sessions |
|
87 | 87 | engine = create_engine(self.dburi, echo=self.log_sql) |
|
88 | 88 | init_model(engine) |
|
89 | 89 | self.sa = Session() |
|
90 | 90 | |
|
91 | 91 | def create_tables(self, override=False): |
|
92 | 92 | """ |
|
93 | 93 | Create a auth database |
|
94 | 94 | """ |
|
95 | 95 | |
|
96 | 96 | log.info("Existing database with the same name is going to be destroyed.") |
|
97 | 97 | log.info("Setup command will run DROP ALL command on that database.") |
|
98 | 98 | if self.tests: |
|
99 | 99 | destroy = True |
|
100 | 100 | else: |
|
101 | 101 | destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]') |
|
102 | 102 | if not destroy: |
|
103 | 103 | log.info('Nothing done.') |
|
104 | 104 | sys.exit(0) |
|
105 | 105 | if destroy: |
|
106 | 106 | Base.metadata.drop_all() |
|
107 | 107 | |
|
108 | 108 | checkfirst = not override |
|
109 | 109 | Base.metadata.create_all(checkfirst=checkfirst) |
|
110 | 110 | log.info('Created tables for %s', self.dbname) |
|
111 | 111 | |
|
112 | 112 | def set_db_version(self): |
|
113 | 113 | ver = DbMigrateVersion() |
|
114 | 114 | ver.version = __dbversion__ |
|
115 | 115 | ver.repository_id = 'rhodecode_db_migrations' |
|
116 | 116 | ver.repository_path = 'versions' |
|
117 | 117 | self.sa.add(ver) |
|
118 | 118 | log.info('db version set to: %s', __dbversion__) |
|
119 | 119 | |
|
120 | 120 | def run_pre_migration_tasks(self): |
|
121 | 121 | """ |
|
122 | 122 | Run various tasks before actually doing migrations |
|
123 | 123 | """ |
|
124 | 124 | # delete cache keys on each upgrade |
|
125 | 125 | total = CacheKey.query().count() |
|
126 | 126 | log.info("Deleting (%s) cache keys now...", total) |
|
127 | 127 | CacheKey.delete_all_cache() |
|
128 | 128 | |
|
129 | 129 | def upgrade(self, version=None): |
|
130 | 130 | """ |
|
131 | 131 | Upgrades given database schema to given revision following |
|
132 | 132 | all needed steps, to perform the upgrade |
|
133 | 133 | |
|
134 | 134 | """ |
|
135 | 135 | |
|
136 | 136 | from rhodecode.lib.dbmigrate.migrate.versioning import api |
|
137 | 137 | from rhodecode.lib.dbmigrate.migrate.exceptions import \ |
|
138 | 138 | DatabaseNotControlledError |
|
139 | 139 | |
|
140 | 140 | if 'sqlite' in self.dburi: |
|
141 | 141 | print( |
|
142 | 142 | '********************** WARNING **********************\n' |
|
143 | 143 | 'Make sure your version of sqlite is at least 3.7.X. \n' |
|
144 | 144 | 'Earlier versions are known to fail on some migrations\n' |
|
145 | 145 | '*****************************************************\n') |
|
146 | 146 | |
|
147 | 147 | upgrade = self.ask_ok( |
|
148 | 148 | 'You are about to perform a database upgrade. Make ' |
|
149 | 149 | 'sure you have backed up your database. ' |
|
150 | 150 | 'Continue ? [y/n]') |
|
151 | 151 | if not upgrade: |
|
152 | 152 | log.info('No upgrade performed') |
|
153 | 153 | sys.exit(0) |
|
154 | 154 | |
|
155 | 155 | repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))), |
|
156 | 156 | 'rhodecode/lib/dbmigrate') |
|
157 | 157 | db_uri = self.dburi |
|
158 | 158 | |
|
159 | 159 | if version: |
|
160 | 160 | DbMigrateVersion.set_version(version) |
|
161 | 161 | |
|
162 | 162 | try: |
|
163 | 163 | curr_version = api.db_version(db_uri, repository_path) |
|
164 | 164 | msg = ('Found current database db_uri under version ' |
|
165 | 165 | 'control with version {}'.format(curr_version)) |
|
166 | 166 | |
|
167 | 167 | except (RuntimeError, DatabaseNotControlledError): |
|
168 | 168 | curr_version = 1 |
|
169 | 169 | msg = ('Current database is not under version control. Setting ' |
|
170 | 170 | 'as version %s' % curr_version) |
|
171 | 171 | api.version_control(db_uri, repository_path, curr_version) |
|
172 | 172 | |
|
173 | 173 | notify(msg) |
|
174 | 174 | |
|
175 | 175 | self.run_pre_migration_tasks() |
|
176 | 176 | |
|
177 | 177 | if curr_version == __dbversion__: |
|
178 | 178 | log.info('This database is already at the newest version') |
|
179 | 179 | sys.exit(0) |
|
180 | 180 | |
|
181 | 181 | upgrade_steps = range(curr_version + 1, __dbversion__ + 1) |
|
182 | 182 | notify('attempting to upgrade database from ' |
|
183 | 183 | 'version %s to version %s' % (curr_version, __dbversion__)) |
|
184 | 184 | |
|
185 | 185 | # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE |
|
186 | 186 | _step = None |
|
187 | 187 | for step in upgrade_steps: |
|
188 | 188 | notify('performing upgrade step %s' % step) |
|
189 | 189 | time.sleep(0.5) |
|
190 | 190 | |
|
191 | 191 | api.upgrade(db_uri, repository_path, step) |
|
192 | 192 | self.sa.rollback() |
|
193 | 193 | notify('schema upgrade for step %s completed' % (step,)) |
|
194 | 194 | |
|
195 | 195 | _step = step |
|
196 | 196 | |
|
197 | 197 | notify('upgrade to version %s successful' % _step) |
|
198 | 198 | |
|
199 | 199 | def fix_repo_paths(self): |
|
200 | 200 | """ |
|
201 | 201 | Fixes an old RhodeCode version path into new one without a '*' |
|
202 | 202 | """ |
|
203 | 203 | |
|
204 | 204 | paths = self.sa.query(RhodeCodeUi)\ |
|
205 | 205 | .filter(RhodeCodeUi.ui_key == '/')\ |
|
206 | 206 | .scalar() |
|
207 | 207 | |
|
208 | 208 | paths.ui_value = paths.ui_value.replace('*', '') |
|
209 | 209 | |
|
210 | 210 | try: |
|
211 | 211 | self.sa.add(paths) |
|
212 | 212 | self.sa.commit() |
|
213 | 213 | except Exception: |
|
214 | 214 | self.sa.rollback() |
|
215 | 215 | raise |
|
216 | 216 | |
|
217 | 217 | def fix_default_user(self): |
|
218 | 218 | """ |
|
219 | 219 | Fixes an old default user with some 'nicer' default values, |
|
220 | 220 | used mostly for anonymous access |
|
221 | 221 | """ |
|
222 | 222 | def_user = self.sa.query(User)\ |
|
223 | 223 | .filter(User.username == User.DEFAULT_USER)\ |
|
224 | 224 | .one() |
|
225 | 225 | |
|
226 | 226 | def_user.name = 'Anonymous' |
|
227 | 227 | def_user.lastname = 'User' |
|
228 | 228 | def_user.email = User.DEFAULT_USER_EMAIL |
|
229 | 229 | |
|
230 | 230 | try: |
|
231 | 231 | self.sa.add(def_user) |
|
232 | 232 | self.sa.commit() |
|
233 | 233 | except Exception: |
|
234 | 234 | self.sa.rollback() |
|
235 | 235 | raise |
|
236 | 236 | |
|
237 | 237 | def fix_settings(self): |
|
238 | 238 | """ |
|
239 | 239 | Fixes rhodecode settings and adds ga_code key for google analytics |
|
240 | 240 | """ |
|
241 | 241 | |
|
242 | 242 | hgsettings3 = RhodeCodeSetting('ga_code', '') |
|
243 | 243 | |
|
244 | 244 | try: |
|
245 | 245 | self.sa.add(hgsettings3) |
|
246 | 246 | self.sa.commit() |
|
247 | 247 | except Exception: |
|
248 | 248 | self.sa.rollback() |
|
249 | 249 | raise |
|
250 | 250 | |
|
251 | 251 | def create_admin_and_prompt(self): |
|
252 | 252 | |
|
253 | 253 | # defaults |
|
254 | 254 | defaults = self.cli_args |
|
255 | 255 | username = defaults.get('username') |
|
256 | 256 | password = defaults.get('password') |
|
257 | 257 | email = defaults.get('email') |
|
258 | 258 | |
|
259 | 259 | if username is None: |
|
260 | 260 | username = raw_input('Specify admin username:') |
|
261 | 261 | if password is None: |
|
262 | 262 | password = self._get_admin_password() |
|
263 | 263 | if not password: |
|
264 | 264 | # second try |
|
265 | 265 | password = self._get_admin_password() |
|
266 | 266 | if not password: |
|
267 | 267 | sys.exit() |
|
268 | 268 | if email is None: |
|
269 | 269 | email = raw_input('Specify admin email:') |
|
270 | 270 | api_key = self.cli_args.get('api_key') |
|
271 | 271 | self.create_user(username, password, email, True, |
|
272 | 272 | strict_creation_check=False, |
|
273 | 273 | api_key=api_key) |
|
274 | 274 | |
|
275 | 275 | def _get_admin_password(self): |
|
276 | 276 | password = getpass.getpass('Specify admin password ' |
|
277 | 277 | '(min 6 chars):') |
|
278 | 278 | confirm = getpass.getpass('Confirm password:') |
|
279 | 279 | |
|
280 | 280 | if password != confirm: |
|
281 | 281 | log.error('passwords mismatch') |
|
282 | 282 | return False |
|
283 | 283 | if len(password) < 6: |
|
284 | 284 | log.error('password is too short - use at least 6 characters') |
|
285 | 285 | return False |
|
286 | 286 | |
|
287 | 287 | return password |
|
288 | 288 | |
|
289 | 289 | def create_test_admin_and_users(self): |
|
290 | 290 | log.info('creating admin and regular test users') |
|
291 | 291 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \ |
|
292 | 292 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ |
|
293 | 293 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ |
|
294 | 294 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ |
|
295 | 295 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL |
|
296 | 296 | |
|
297 | 297 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, |
|
298 | 298 | TEST_USER_ADMIN_EMAIL, True, api_key=True) |
|
299 | 299 | |
|
300 | 300 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, |
|
301 | 301 | TEST_USER_REGULAR_EMAIL, False, api_key=True) |
|
302 | 302 | |
|
303 | 303 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, |
|
304 | 304 | TEST_USER_REGULAR2_EMAIL, False, api_key=True) |
|
305 | 305 | |
|
306 | 306 | def create_ui_settings(self, repo_store_path): |
|
307 | 307 | """ |
|
308 | 308 | Creates ui settings, fills out hooks |
|
309 | 309 | and disables dotencode |
|
310 | 310 | """ |
|
311 | 311 | settings_model = SettingsModel(sa=self.sa) |
|
312 | 312 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
313 | 313 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
314 | 314 | |
|
315 | 315 | # Build HOOKS |
|
316 | 316 | hooks = [ |
|
317 | 317 | (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'), |
|
318 | 318 | |
|
319 | 319 | # HG |
|
320 | 320 | (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'), |
|
321 | 321 | (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'), |
|
322 | 322 | (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'), |
|
323 | 323 | (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'), |
|
324 | 324 | (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'), |
|
325 | 325 | (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'), |
|
326 | 326 | |
|
327 | 327 | ] |
|
328 | 328 | |
|
329 | 329 | for key, value in hooks: |
|
330 | 330 | hook_obj = settings_model.get_ui_by_key(key) |
|
331 | 331 | hooks2 = hook_obj if hook_obj else RhodeCodeUi() |
|
332 | 332 | hooks2.ui_section = 'hooks' |
|
333 | 333 | hooks2.ui_key = key |
|
334 | 334 | hooks2.ui_value = value |
|
335 | 335 | self.sa.add(hooks2) |
|
336 | 336 | |
|
337 | 337 | # enable largefiles |
|
338 | 338 | largefiles = RhodeCodeUi() |
|
339 | 339 | largefiles.ui_section = 'extensions' |
|
340 | 340 | largefiles.ui_key = 'largefiles' |
|
341 | 341 | largefiles.ui_value = '' |
|
342 | 342 | self.sa.add(largefiles) |
|
343 | 343 | |
|
344 | 344 | # set default largefiles cache dir, defaults to |
|
345 | 345 | # /repo_store_location/.cache/largefiles |
|
346 | 346 | largefiles = RhodeCodeUi() |
|
347 | 347 | largefiles.ui_section = 'largefiles' |
|
348 | 348 | largefiles.ui_key = 'usercache' |
|
349 | 349 | largefiles.ui_value = largefiles_store(repo_store_path) |
|
350 | 350 | |
|
351 | 351 | self.sa.add(largefiles) |
|
352 | 352 | |
|
353 | 353 | # set default lfs cache dir, defaults to |
|
354 | 354 | # /repo_store_location/.cache/lfs_store |
|
355 | 355 | lfsstore = RhodeCodeUi() |
|
356 | 356 | lfsstore.ui_section = 'vcs_git_lfs' |
|
357 | 357 | lfsstore.ui_key = 'store_location' |
|
358 | 358 | lfsstore.ui_value = lfs_store(repo_store_path) |
|
359 | 359 | |
|
360 | 360 | self.sa.add(lfsstore) |
|
361 | 361 | |
|
362 | 362 | # enable hgsubversion disabled by default |
|
363 | 363 | hgsubversion = RhodeCodeUi() |
|
364 | 364 | hgsubversion.ui_section = 'extensions' |
|
365 | 365 | hgsubversion.ui_key = 'hgsubversion' |
|
366 | 366 | hgsubversion.ui_value = '' |
|
367 | 367 | hgsubversion.ui_active = False |
|
368 | 368 | self.sa.add(hgsubversion) |
|
369 | 369 | |
|
370 | 370 | # enable hgevolve disabled by default |
|
371 | 371 | hgevolve = RhodeCodeUi() |
|
372 | 372 | hgevolve.ui_section = 'extensions' |
|
373 | 373 | hgevolve.ui_key = 'evolve' |
|
374 | 374 | hgevolve.ui_value = '' |
|
375 | 375 | hgevolve.ui_active = False |
|
376 | 376 | self.sa.add(hgevolve) |
|
377 | 377 | |
|
378 | 378 | hgevolve = RhodeCodeUi() |
|
379 | 379 | hgevolve.ui_section = 'experimental' |
|
380 | 380 | hgevolve.ui_key = 'evolution' |
|
381 | 381 | hgevolve.ui_value = '' |
|
382 | 382 | hgevolve.ui_active = False |
|
383 | 383 | self.sa.add(hgevolve) |
|
384 | 384 | |
|
385 | 385 | hgevolve = RhodeCodeUi() |
|
386 | 386 | hgevolve.ui_section = 'experimental' |
|
387 | 387 | hgevolve.ui_key = 'evolution.exchange' |
|
388 | 388 | hgevolve.ui_value = '' |
|
389 | 389 | hgevolve.ui_active = False |
|
390 | 390 | self.sa.add(hgevolve) |
|
391 | 391 | |
|
392 | 392 | hgevolve = RhodeCodeUi() |
|
393 | 393 | hgevolve.ui_section = 'extensions' |
|
394 | 394 | hgevolve.ui_key = 'topic' |
|
395 | 395 | hgevolve.ui_value = '' |
|
396 | 396 | hgevolve.ui_active = False |
|
397 | 397 | self.sa.add(hgevolve) |
|
398 | 398 | |
|
399 | 399 | # enable hggit disabled by default |
|
400 | 400 | hggit = RhodeCodeUi() |
|
401 | 401 | hggit.ui_section = 'extensions' |
|
402 | 402 | hggit.ui_key = 'hggit' |
|
403 | 403 | hggit.ui_value = '' |
|
404 | 404 | hggit.ui_active = False |
|
405 | 405 | self.sa.add(hggit) |
|
406 | 406 | |
|
407 | 407 | # set svn branch defaults |
|
408 | 408 | branches = ["/branches/*", "/trunk"] |
|
409 | 409 | tags = ["/tags/*"] |
|
410 | 410 | |
|
411 | 411 | for branch in branches: |
|
412 | 412 | settings_model.create_ui_section_value( |
|
413 | 413 | RhodeCodeUi.SVN_BRANCH_ID, branch) |
|
414 | 414 | |
|
415 | 415 | for tag in tags: |
|
416 | 416 | settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag) |
|
417 | 417 | |
|
418 | 418 | def create_auth_plugin_options(self, skip_existing=False): |
|
419 | 419 | """ |
|
420 | 420 | Create default auth plugin settings, and make it active |
|
421 | 421 | |
|
422 | 422 | :param skip_existing: |
|
423 | 423 | """ |
|
424 | 424 | |
|
425 | 425 | for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'), |
|
426 | 426 | ('auth_rhodecode_enabled', 'True', 'bool')]: |
|
427 | 427 | if (skip_existing and |
|
428 | 428 | SettingsModel().get_setting_by_name(k) is not None): |
|
429 | 429 | log.debug('Skipping option %s', k) |
|
430 | 430 | continue |
|
431 | 431 | setting = RhodeCodeSetting(k, v, t) |
|
432 | 432 | self.sa.add(setting) |
|
433 | 433 | |
|
434 | 434 | def create_default_options(self, skip_existing=False): |
|
435 | 435 | """Creates default settings""" |
|
436 | 436 | |
|
437 | 437 | for k, v, t in [ |
|
438 | 438 | ('default_repo_enable_locking', False, 'bool'), |
|
439 | 439 | ('default_repo_enable_downloads', False, 'bool'), |
|
440 | 440 | ('default_repo_enable_statistics', False, 'bool'), |
|
441 | 441 | ('default_repo_private', False, 'bool'), |
|
442 | 442 | ('default_repo_type', 'hg', 'unicode')]: |
|
443 | 443 | |
|
444 | 444 | if (skip_existing and |
|
445 | 445 | SettingsModel().get_setting_by_name(k) is not None): |
|
446 | 446 | log.debug('Skipping option %s', k) |
|
447 | 447 | continue |
|
448 | 448 | setting = RhodeCodeSetting(k, v, t) |
|
449 | 449 | self.sa.add(setting) |
|
450 | 450 | |
|
451 | 451 | def fixup_groups(self): |
|
452 | 452 | def_usr = User.get_default_user() |
|
453 | 453 | for g in RepoGroup.query().all(): |
|
454 | 454 | g.group_name = g.get_new_name(g.name) |
|
455 | 455 | self.sa.add(g) |
|
456 | 456 | # get default perm |
|
457 | 457 | default = UserRepoGroupToPerm.query()\ |
|
458 | 458 | .filter(UserRepoGroupToPerm.group == g)\ |
|
459 | 459 | .filter(UserRepoGroupToPerm.user == def_usr)\ |
|
460 | 460 | .scalar() |
|
461 | 461 | |
|
462 | 462 | if default is None: |
|
463 | 463 | log.debug('missing default permission for group %s adding', g) |
|
464 | 464 | perm_obj = RepoGroupModel()._create_default_perms(g) |
|
465 | 465 | self.sa.add(perm_obj) |
|
466 | 466 | |
|
467 | 467 | def reset_permissions(self, username): |
|
468 | 468 | """ |
|
469 | 469 | Resets permissions to default state, useful when old systems had |
|
470 | 470 | bad permissions, we must clean them up |
|
471 | 471 | |
|
472 | 472 | :param username: |
|
473 | 473 | """ |
|
474 | 474 | default_user = User.get_by_username(username) |
|
475 | 475 | if not default_user: |
|
476 | 476 | return |
|
477 | 477 | |
|
478 | 478 | u2p = UserToPerm.query()\ |
|
479 | 479 | .filter(UserToPerm.user == default_user).all() |
|
480 | 480 | fixed = False |
|
481 | 481 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): |
|
482 | 482 | for p in u2p: |
|
483 | 483 | Session().delete(p) |
|
484 | 484 | fixed = True |
|
485 | 485 | self.populate_default_permissions() |
|
486 | 486 | return fixed |
|
487 | 487 | |
|
488 | def update_repo_info(self): | |
|
489 | RepoModel.update_repoinfo() | |
|
490 | ||
|
491 | 488 | def config_prompt(self, test_repo_path='', retries=3): |
|
492 | 489 | defaults = self.cli_args |
|
493 | 490 | _path = defaults.get('repos_location') |
|
494 | 491 | if retries == 3: |
|
495 | 492 | log.info('Setting up repositories config') |
|
496 | 493 | |
|
497 | 494 | if _path is not None: |
|
498 | 495 | path = _path |
|
499 | 496 | elif not self.tests and not test_repo_path: |
|
500 | 497 | path = raw_input( |
|
501 | 498 | 'Enter a valid absolute path to store repositories. ' |
|
502 | 499 | 'All repositories in that path will be added automatically:' |
|
503 | 500 | ) |
|
504 | 501 | else: |
|
505 | 502 | path = test_repo_path |
|
506 | 503 | path_ok = True |
|
507 | 504 | |
|
508 | 505 | # check proper dir |
|
509 | 506 | if not os.path.isdir(path): |
|
510 | 507 | path_ok = False |
|
511 | 508 | log.error('Given path %s is not a valid directory', path) |
|
512 | 509 | |
|
513 | 510 | elif not os.path.isabs(path): |
|
514 | 511 | path_ok = False |
|
515 | 512 | log.error('Given path %s is not an absolute path', path) |
|
516 | 513 | |
|
517 | 514 | # check if path is at least readable. |
|
518 | 515 | if not os.access(path, os.R_OK): |
|
519 | 516 | path_ok = False |
|
520 | 517 | log.error('Given path %s is not readable', path) |
|
521 | 518 | |
|
522 | 519 | # check write access, warn user about non writeable paths |
|
523 | 520 | elif not os.access(path, os.W_OK) and path_ok: |
|
524 | 521 | log.warning('No write permission to given path %s', path) |
|
525 | 522 | |
|
526 | 523 | q = ('Given path %s is not writeable, do you want to ' |
|
527 | 524 | 'continue with read only mode ? [y/n]' % (path,)) |
|
528 | 525 | if not self.ask_ok(q): |
|
529 | 526 | log.error('Canceled by user') |
|
530 | 527 | sys.exit(-1) |
|
531 | 528 | |
|
532 | 529 | if retries == 0: |
|
533 | 530 | sys.exit('max retries reached') |
|
534 | 531 | if not path_ok: |
|
535 | 532 | retries -= 1 |
|
536 | 533 | return self.config_prompt(test_repo_path, retries) |
|
537 | 534 | |
|
538 | 535 | real_path = os.path.normpath(os.path.realpath(path)) |
|
539 | 536 | |
|
540 | 537 | if real_path != os.path.normpath(path): |
|
541 | 538 | q = ('Path looks like a symlink, RhodeCode Enterprise will store ' |
|
542 | 539 | 'given path as %s ? [y/n]') % (real_path,) |
|
543 | 540 | if not self.ask_ok(q): |
|
544 | 541 | log.error('Canceled by user') |
|
545 | 542 | sys.exit(-1) |
|
546 | 543 | |
|
547 | 544 | return real_path |
|
548 | 545 | |
|
549 | 546 | def create_settings(self, path): |
|
550 | 547 | |
|
551 | 548 | self.create_ui_settings(path) |
|
552 | 549 | |
|
553 | 550 | ui_config = [ |
|
554 | 551 | ('web', 'push_ssl', 'False'), |
|
555 | 552 | ('web', 'allow_archive', 'gz zip bz2'), |
|
556 | 553 | ('web', 'allow_push', '*'), |
|
557 | 554 | ('web', 'baseurl', '/'), |
|
558 | 555 | ('paths', '/', path), |
|
559 | 556 | ('phases', 'publish', 'True') |
|
560 | 557 | ] |
|
561 | 558 | for section, key, value in ui_config: |
|
562 | 559 | ui_conf = RhodeCodeUi() |
|
563 | 560 | setattr(ui_conf, 'ui_section', section) |
|
564 | 561 | setattr(ui_conf, 'ui_key', key) |
|
565 | 562 | setattr(ui_conf, 'ui_value', value) |
|
566 | 563 | self.sa.add(ui_conf) |
|
567 | 564 | |
|
568 | 565 | # rhodecode app settings |
|
569 | 566 | settings = [ |
|
570 | 567 | ('realm', 'RhodeCode', 'unicode'), |
|
571 | 568 | ('title', '', 'unicode'), |
|
572 | 569 | ('pre_code', '', 'unicode'), |
|
573 | 570 | ('post_code', '', 'unicode'), |
|
574 | 571 | ('show_public_icon', True, 'bool'), |
|
575 | 572 | ('show_private_icon', True, 'bool'), |
|
576 | 573 | ('stylify_metatags', False, 'bool'), |
|
577 | 574 | ('dashboard_items', 100, 'int'), |
|
578 | 575 | ('admin_grid_items', 25, 'int'), |
|
579 | 576 | ('show_version', True, 'bool'), |
|
580 | 577 | ('use_gravatar', False, 'bool'), |
|
581 | 578 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), |
|
582 | 579 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), |
|
583 | 580 | ('support_url', '', 'unicode'), |
|
584 | 581 | ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'), |
|
585 | 582 | ('show_revision_number', True, 'bool'), |
|
586 | 583 | ('show_sha_length', 12, 'int'), |
|
587 | 584 | ] |
|
588 | 585 | |
|
589 | 586 | for key, val, type_ in settings: |
|
590 | 587 | sett = RhodeCodeSetting(key, val, type_) |
|
591 | 588 | self.sa.add(sett) |
|
592 | 589 | |
|
593 | 590 | self.create_auth_plugin_options() |
|
594 | 591 | self.create_default_options() |
|
595 | 592 | |
|
596 | 593 | log.info('created ui config') |
|
597 | 594 | |
|
598 | 595 | def create_user(self, username, password, email='', admin=False, |
|
599 | 596 | strict_creation_check=True, api_key=None): |
|
600 | 597 | log.info('creating user `%s`', username) |
|
601 | 598 | user = UserModel().create_or_update( |
|
602 | 599 | username, password, email, firstname=u'RhodeCode', lastname=u'Admin', |
|
603 | 600 | active=True, admin=admin, extern_type="rhodecode", |
|
604 | 601 | strict_creation_check=strict_creation_check) |
|
605 | 602 | |
|
606 | 603 | if api_key: |
|
607 | 604 | log.info('setting a new default auth token for user `%s`', username) |
|
608 | 605 | UserModel().add_auth_token( |
|
609 | 606 | user=user, lifetime_minutes=-1, |
|
610 | 607 | role=UserModel.auth_token_role.ROLE_ALL, |
|
611 | 608 | description=u'BUILTIN TOKEN') |
|
612 | 609 | |
|
613 | 610 | def create_default_user(self): |
|
614 | 611 | log.info('creating default user') |
|
615 | 612 | # create default user for handling default permissions. |
|
616 | 613 | user = UserModel().create_or_update(username=User.DEFAULT_USER, |
|
617 | 614 | password=str(uuid.uuid1())[:20], |
|
618 | 615 | email=User.DEFAULT_USER_EMAIL, |
|
619 | 616 | firstname=u'Anonymous', |
|
620 | 617 | lastname=u'User', |
|
621 | 618 | strict_creation_check=False) |
|
622 | 619 | # based on configuration options activate/de-activate this user which |
|
623 | 620 | # controlls anonymous access |
|
624 | 621 | if self.cli_args.get('public_access') is False: |
|
625 | 622 | log.info('Public access disabled') |
|
626 | 623 | user.active = False |
|
627 | 624 | Session().add(user) |
|
628 | 625 | Session().commit() |
|
629 | 626 | |
|
630 | 627 | def create_permissions(self): |
|
631 | 628 | """ |
|
632 | 629 | Creates all permissions defined in the system |
|
633 | 630 | """ |
|
634 | 631 | # module.(access|create|change|delete)_[name] |
|
635 | 632 | # module.(none|read|write|admin) |
|
636 | 633 | log.info('creating permissions') |
|
637 | 634 | PermissionModel(self.sa).create_permissions() |
|
638 | 635 | |
|
639 | 636 | def populate_default_permissions(self): |
|
640 | 637 | """ |
|
641 | 638 | Populate default permissions. It will create only the default |
|
642 | 639 | permissions that are missing, and not alter already defined ones |
|
643 | 640 | """ |
|
644 | 641 | log.info('creating default user permissions') |
|
645 | 642 | PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER) |
@@ -1,1084 +1,1084 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import time |
|
23 | 23 | import logging |
|
24 | 24 | import datetime |
|
25 | 25 | import traceback |
|
26 | 26 | import hashlib |
|
27 | 27 | import collections |
|
28 | 28 | |
|
29 | 29 | from sqlalchemy import * |
|
30 | 30 | from sqlalchemy.ext.hybrid import hybrid_property |
|
31 | 31 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
32 | 32 | from sqlalchemy.exc import DatabaseError |
|
33 | 33 | from beaker.cache import cache_region, region_invalidate |
|
34 | 34 | from webob.exc import HTTPNotFound |
|
35 | 35 | |
|
36 | 36 | from rhodecode.translation import _ |
|
37 | 37 | |
|
38 | 38 | from rhodecode.lib.vcs import get_backend |
|
39 | 39 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
40 | 40 | from rhodecode.lib.vcs.exceptions import VCSError |
|
41 | 41 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
42 | 42 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
43 | 43 | |
|
44 | 44 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, \ |
|
45 | 45 | safe_unicode, remove_suffix, remove_prefix, time_to_datetime |
|
46 | 46 | from rhodecode.lib.ext_json import json |
|
47 | 47 | from rhodecode.lib.caching_query import FromCache |
|
48 | 48 | |
|
49 | 49 | from rhodecode.model.meta import Base, Session |
|
50 | 50 | |
|
51 | 51 | URL_SEP = '/' |
|
52 | 52 | log = logging.getLogger(__name__) |
|
53 | 53 | |
|
54 | 54 | #============================================================================== |
|
55 | 55 | # BASE CLASSES |
|
56 | 56 | #============================================================================== |
|
57 | 57 | |
|
58 | 58 | _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest() |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | class BaseModel(object): |
|
62 | 62 | """ |
|
63 | 63 | Base Model for all classes |
|
64 | 64 | """ |
|
65 | 65 | |
|
66 | 66 | @classmethod |
|
67 | 67 | def _get_keys(cls): |
|
68 | 68 | """return column names for this model """ |
|
69 | 69 | return class_mapper(cls).c.keys() |
|
70 | 70 | |
|
71 | 71 | def get_dict(self): |
|
72 | 72 | """ |
|
73 | 73 | return dict with keys and values corresponding |
|
74 | 74 | to this model data """ |
|
75 | 75 | |
|
76 | 76 | d = {} |
|
77 | 77 | for k in self._get_keys(): |
|
78 | 78 | d[k] = getattr(self, k) |
|
79 | 79 | |
|
80 | 80 | # also use __json__() if present to get additional fields |
|
81 | 81 | _json_attr = getattr(self, '__json__', None) |
|
82 | 82 | if _json_attr: |
|
83 | 83 | # update with attributes from __json__ |
|
84 | 84 | if callable(_json_attr): |
|
85 | 85 | _json_attr = _json_attr() |
|
86 | 86 | for k, val in _json_attr.iteritems(): |
|
87 | 87 | d[k] = val |
|
88 | 88 | return d |
|
89 | 89 | |
|
90 | 90 | def get_appstruct(self): |
|
91 | 91 | """return list with keys and values tupples corresponding |
|
92 | 92 | to this model data """ |
|
93 | 93 | |
|
94 | 94 | l = [] |
|
95 | 95 | for k in self._get_keys(): |
|
96 | 96 | l.append((k, getattr(self, k),)) |
|
97 | 97 | return l |
|
98 | 98 | |
|
99 | 99 | def populate_obj(self, populate_dict): |
|
100 | 100 | """populate model with data from given populate_dict""" |
|
101 | 101 | |
|
102 | 102 | for k in self._get_keys(): |
|
103 | 103 | if k in populate_dict: |
|
104 | 104 | setattr(self, k, populate_dict[k]) |
|
105 | 105 | |
|
106 | 106 | @classmethod |
|
107 | 107 | def query(cls): |
|
108 | 108 | return Session().query(cls) |
|
109 | 109 | |
|
110 | 110 | @classmethod |
|
111 | 111 | def get(cls, id_): |
|
112 | 112 | if id_: |
|
113 | 113 | return cls.query().get(id_) |
|
114 | 114 | |
|
115 | 115 | @classmethod |
|
116 | 116 | def get_or_404(cls, id_): |
|
117 | 117 | try: |
|
118 | 118 | id_ = int(id_) |
|
119 | 119 | except (TypeError, ValueError): |
|
120 | 120 | raise HTTPNotFound |
|
121 | 121 | |
|
122 | 122 | res = cls.query().get(id_) |
|
123 | 123 | if not res: |
|
124 | 124 | raise HTTPNotFound |
|
125 | 125 | return res |
|
126 | 126 | |
|
127 | 127 | @classmethod |
|
128 | 128 | def getAll(cls): |
|
129 | 129 | # deprecated and left for backward compatibility |
|
130 | 130 | return cls.get_all() |
|
131 | 131 | |
|
132 | 132 | @classmethod |
|
133 | 133 | def get_all(cls): |
|
134 | 134 | return cls.query().all() |
|
135 | 135 | |
|
136 | 136 | @classmethod |
|
137 | 137 | def delete(cls, id_): |
|
138 | 138 | obj = cls.query().get(id_) |
|
139 | 139 | Session().delete(obj) |
|
140 | 140 | |
|
141 | 141 | def __repr__(self): |
|
142 | 142 | if hasattr(self, '__unicode__'): |
|
143 | 143 | # python repr needs to return str |
|
144 | 144 | return safe_str(self.__unicode__()) |
|
145 | 145 | return '<DB:%s>' % (self.__class__.__name__) |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | class RhodeCodeSetting(Base, BaseModel): |
|
149 | 149 | __tablename__ = 'rhodecode_settings' |
|
150 | 150 | __table_args__ = ( |
|
151 | 151 | UniqueConstraint('app_settings_name'), |
|
152 | 152 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
153 | 153 | 'mysql_charset': 'utf8'} |
|
154 | 154 | ) |
|
155 | 155 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
156 | 156 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) |
|
157 | 157 | _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None) |
|
158 | 158 | |
|
159 | 159 | def __init__(self, k='', v=''): |
|
160 | 160 | self.app_settings_name = k |
|
161 | 161 | self.app_settings_value = v |
|
162 | 162 | |
|
163 | 163 | @validates('_app_settings_value') |
|
164 | 164 | def validate_settings_value(self, key, val): |
|
165 | 165 | assert type(val) == unicode |
|
166 | 166 | return val |
|
167 | 167 | |
|
168 | 168 | @hybrid_property |
|
169 | 169 | def app_settings_value(self): |
|
170 | 170 | v = self._app_settings_value |
|
171 | 171 | if self.app_settings_name in ["ldap_active", |
|
172 | 172 | "default_repo_enable_statistics", |
|
173 | 173 | "default_repo_enable_locking", |
|
174 | 174 | "default_repo_private", |
|
175 | 175 | "default_repo_enable_downloads"]: |
|
176 | 176 | v = str2bool(v) |
|
177 | 177 | return v |
|
178 | 178 | |
|
179 | 179 | @app_settings_value.setter |
|
180 | 180 | def app_settings_value(self, val): |
|
181 | 181 | """ |
|
182 | 182 | Setter that will always make sure we use unicode in app_settings_value |
|
183 | 183 | |
|
184 | 184 | :param val: |
|
185 | 185 | """ |
|
186 | 186 | self._app_settings_value = safe_unicode(val) |
|
187 | 187 | |
|
188 | 188 | def __unicode__(self): |
|
189 | 189 | return u"<%s('%s:%s')>" % ( |
|
190 | 190 | self.__class__.__name__, |
|
191 | 191 | self.app_settings_name, self.app_settings_value |
|
192 | 192 | ) |
|
193 | 193 | |
|
194 | 194 | |
|
195 | 195 | class RhodeCodeUi(Base, BaseModel): |
|
196 | 196 | __tablename__ = 'rhodecode_ui' |
|
197 | 197 | __table_args__ = ( |
|
198 | 198 | UniqueConstraint('ui_key'), |
|
199 | 199 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
200 | 200 | 'mysql_charset': 'utf8'} |
|
201 | 201 | ) |
|
202 | 202 | |
|
203 | 203 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
204 | 204 | HOOK_PUSH = 'changegroup.push_logger' |
|
205 | 205 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' |
|
206 | 206 | HOOK_PULL = 'outgoing.pull_logger' |
|
207 | 207 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' |
|
208 | 208 | |
|
209 | 209 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
210 | 210 | ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None) |
|
211 | 211 | ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None) |
|
212 | 212 | ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None) |
|
213 | 213 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
214 | 214 | |
|
215 | 215 | |
|
216 | 216 | |
|
217 | 217 | class User(Base, BaseModel): |
|
218 | 218 | __tablename__ = 'users' |
|
219 | 219 | __table_args__ = ( |
|
220 | 220 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
221 | 221 | Index('u_username_idx', 'username'), |
|
222 | 222 | Index('u_email_idx', 'email'), |
|
223 | 223 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
224 | 224 | 'mysql_charset': 'utf8'} |
|
225 | 225 | ) |
|
226 | 226 | DEFAULT_USER = 'default' |
|
227 | 227 | DEFAULT_PERMISSIONS = [ |
|
228 | 228 | 'hg.register.manual_activate', 'hg.create.repository', |
|
229 | 229 | 'hg.fork.repository', 'repository.read', 'group.read' |
|
230 | 230 | ] |
|
231 | 231 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
232 | 232 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
233 | 233 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
234 | 234 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
235 | 235 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
236 | 236 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) |
|
237 | 237 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
238 | 238 | _email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
239 | 239 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
240 | 240 | ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None) |
|
241 | 241 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
242 | 242 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
243 | 243 | |
|
244 | 244 | user_log = relationship('UserLog') |
|
245 | 245 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
246 | 246 | |
|
247 | 247 | repositories = relationship('Repository') |
|
248 | 248 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
249 | 249 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') |
|
250 | 250 | |
|
251 | 251 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
252 | 252 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') |
|
253 | 253 | |
|
254 | 254 | group_member = relationship('UserGroupMember', cascade='all') |
|
255 | 255 | |
|
256 | 256 | notifications = relationship('UserNotification', cascade='all') |
|
257 | 257 | # notifications assigned to this user |
|
258 | 258 | user_created_notifications = relationship('Notification', cascade='all') |
|
259 | 259 | # comments created by this user |
|
260 | 260 | user_comments = relationship('ChangesetComment', cascade='all') |
|
261 | 261 | user_emails = relationship('UserEmailMap', cascade='all') |
|
262 | 262 | |
|
263 | 263 | @hybrid_property |
|
264 | 264 | def email(self): |
|
265 | 265 | return self._email |
|
266 | 266 | |
|
267 | 267 | @email.setter |
|
268 | 268 | def email(self, val): |
|
269 | 269 | self._email = val.lower() if val else None |
|
270 | 270 | |
|
271 | 271 | @property |
|
272 | 272 | def firstname(self): |
|
273 | 273 | # alias for future |
|
274 | 274 | return self.name |
|
275 | 275 | |
|
276 | 276 | @property |
|
277 | 277 | def username_and_name(self): |
|
278 | 278 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) |
|
279 | 279 | |
|
280 | 280 | @property |
|
281 | 281 | def full_name(self): |
|
282 | 282 | return '%s %s' % (self.firstname, self.lastname) |
|
283 | 283 | |
|
284 | 284 | @property |
|
285 | 285 | def full_contact(self): |
|
286 | 286 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) |
|
287 | 287 | |
|
288 | 288 | @property |
|
289 | 289 | def short_contact(self): |
|
290 | 290 | return '%s %s' % (self.firstname, self.lastname) |
|
291 | 291 | |
|
292 | 292 | @property |
|
293 | 293 | def is_admin(self): |
|
294 | 294 | return self.admin |
|
295 | 295 | |
|
296 | 296 | @classmethod |
|
297 | 297 | def get_by_username(cls, username, case_insensitive=False, cache=False): |
|
298 | 298 | if case_insensitive: |
|
299 | 299 | q = cls.query().filter(cls.username.ilike(username)) |
|
300 | 300 | else: |
|
301 | 301 | q = cls.query().filter(cls.username == username) |
|
302 | 302 | |
|
303 | 303 | if cache: |
|
304 | 304 | q = q.options(FromCache( |
|
305 | 305 | "sql_cache_short", |
|
306 | 306 | "get_user_%s" % _hash_key(username) |
|
307 | 307 | ) |
|
308 | 308 | ) |
|
309 | 309 | return q.scalar() |
|
310 | 310 | |
|
311 | 311 | @classmethod |
|
312 | 312 | def get_by_auth_token(cls, auth_token, cache=False): |
|
313 | 313 | q = cls.query().filter(cls.api_key == auth_token) |
|
314 | 314 | |
|
315 | 315 | if cache: |
|
316 | 316 | q = q.options(FromCache("sql_cache_short", |
|
317 | 317 | "get_auth_token_%s" % auth_token)) |
|
318 | 318 | return q.scalar() |
|
319 | 319 | |
|
320 | 320 | @classmethod |
|
321 | 321 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
322 | 322 | if case_insensitive: |
|
323 | 323 | q = cls.query().filter(cls.email.ilike(email)) |
|
324 | 324 | else: |
|
325 | 325 | q = cls.query().filter(cls.email == email) |
|
326 | 326 | |
|
327 | 327 | if cache: |
|
328 | 328 | q = q.options(FromCache("sql_cache_short", |
|
329 | 329 | "get_email_key_%s" % email)) |
|
330 | 330 | |
|
331 | 331 | ret = q.scalar() |
|
332 | 332 | if ret is None: |
|
333 | 333 | q = UserEmailMap.query() |
|
334 | 334 | # try fetching in alternate email map |
|
335 | 335 | if case_insensitive: |
|
336 | 336 | q = q.filter(UserEmailMap.email.ilike(email)) |
|
337 | 337 | else: |
|
338 | 338 | q = q.filter(UserEmailMap.email == email) |
|
339 | 339 | q = q.options(joinedload(UserEmailMap.user)) |
|
340 | 340 | if cache: |
|
341 | 341 | q = q.options(FromCache("sql_cache_short", |
|
342 | 342 | "get_email_map_key_%s" % email)) |
|
343 | 343 | ret = getattr(q.scalar(), 'user', None) |
|
344 | 344 | |
|
345 | 345 | return ret |
|
346 | 346 | |
|
347 | 347 | |
|
348 | 348 | class UserEmailMap(Base, BaseModel): |
|
349 | 349 | __tablename__ = 'user_email_map' |
|
350 | 350 | __table_args__ = ( |
|
351 | 351 | Index('uem_email_idx', 'email'), |
|
352 | 352 | UniqueConstraint('email'), |
|
353 | 353 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
354 | 354 | 'mysql_charset': 'utf8'} |
|
355 | 355 | ) |
|
356 | 356 | __mapper_args__ = {} |
|
357 | 357 | |
|
358 | 358 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
359 | 359 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
360 | 360 | _email = Column("email", String(255), nullable=True, unique=False, default=None) |
|
361 | 361 | user = relationship('User', lazy='joined') |
|
362 | 362 | |
|
363 | 363 | @validates('_email') |
|
364 | 364 | def validate_email(self, key, email): |
|
365 | 365 | # check if this email is not main one |
|
366 | 366 | main_email = Session().query(User).filter(User.email == email).scalar() |
|
367 | 367 | if main_email is not None: |
|
368 | 368 | raise AttributeError('email %s is present is user table' % email) |
|
369 | 369 | return email |
|
370 | 370 | |
|
371 | 371 | @hybrid_property |
|
372 | 372 | def email(self): |
|
373 | 373 | return self._email |
|
374 | 374 | |
|
375 | 375 | @email.setter |
|
376 | 376 | def email(self, val): |
|
377 | 377 | self._email = val.lower() if val else None |
|
378 | 378 | |
|
379 | 379 | |
|
380 | 380 | class UserIpMap(Base, BaseModel): |
|
381 | 381 | __tablename__ = 'user_ip_map' |
|
382 | 382 | __table_args__ = ( |
|
383 | 383 | UniqueConstraint('user_id', 'ip_addr'), |
|
384 | 384 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
385 | 385 | 'mysql_charset': 'utf8'} |
|
386 | 386 | ) |
|
387 | 387 | __mapper_args__ = {} |
|
388 | 388 | |
|
389 | 389 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
390 | 390 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
391 | 391 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) |
|
392 | 392 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
393 | 393 | user = relationship('User', lazy='joined') |
|
394 | 394 | |
|
395 | 395 | |
|
396 | 396 | class UserLog(Base, BaseModel): |
|
397 | 397 | __tablename__ = 'user_logs' |
|
398 | 398 | __table_args__ = ( |
|
399 | 399 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
400 | 400 | 'mysql_charset': 'utf8'}, |
|
401 | 401 | ) |
|
402 | 402 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
403 | 403 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
404 | 404 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
405 | 405 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
|
406 | 406 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) |
|
407 | 407 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) |
|
408 | 408 | action = Column("action", String(1200000), nullable=True, unique=None, default=None) |
|
409 | 409 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
410 | 410 | |
|
411 | 411 | |
|
412 | 412 | user = relationship('User') |
|
413 | 413 | repository = relationship('Repository', cascade='') |
|
414 | 414 | |
|
415 | 415 | |
|
416 | 416 | class UserGroup(Base, BaseModel): |
|
417 | 417 | __tablename__ = 'users_groups' |
|
418 | 418 | __table_args__ = ( |
|
419 | 419 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
420 | 420 | 'mysql_charset': 'utf8'}, |
|
421 | 421 | ) |
|
422 | 422 | |
|
423 | 423 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
424 | 424 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) |
|
425 | 425 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
426 | 426 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
427 | 427 | |
|
428 | 428 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
429 | 429 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') |
|
430 | 430 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
431 | 431 | |
|
432 | 432 | def __unicode__(self): |
|
433 | 433 | return u'<userGroup(%s)>' % (self.users_group_name) |
|
434 | 434 | |
|
435 | 435 | @classmethod |
|
436 | 436 | def get_by_group_name(cls, group_name, cache=False, |
|
437 | 437 | case_insensitive=False): |
|
438 | 438 | if case_insensitive: |
|
439 | 439 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) |
|
440 | 440 | else: |
|
441 | 441 | q = cls.query().filter(cls.users_group_name == group_name) |
|
442 | 442 | if cache: |
|
443 | 443 | q = q.options(FromCache( |
|
444 | 444 | "sql_cache_short", |
|
445 | 445 | "get_user_%s" % _hash_key(group_name) |
|
446 | 446 | ) |
|
447 | 447 | ) |
|
448 | 448 | return q.scalar() |
|
449 | 449 | |
|
450 | 450 | @classmethod |
|
451 | 451 | def get(cls, users_group_id, cache=False): |
|
452 | 452 | user_group = cls.query() |
|
453 | 453 | if cache: |
|
454 | 454 | user_group = user_group.options(FromCache("sql_cache_short", |
|
455 | 455 | "get_users_group_%s" % users_group_id)) |
|
456 | 456 | return user_group.get(users_group_id) |
|
457 | 457 | |
|
458 | 458 | |
|
459 | 459 | class UserGroupMember(Base, BaseModel): |
|
460 | 460 | __tablename__ = 'users_groups_members' |
|
461 | 461 | __table_args__ = ( |
|
462 | 462 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
463 | 463 | 'mysql_charset': 'utf8'}, |
|
464 | 464 | ) |
|
465 | 465 | |
|
466 | 466 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
467 | 467 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
468 | 468 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
469 | 469 | |
|
470 | 470 | user = relationship('User', lazy='joined') |
|
471 | 471 | users_group = relationship('UserGroup') |
|
472 | 472 | |
|
473 | 473 | def __init__(self, gr_id='', u_id=''): |
|
474 | 474 | self.users_group_id = gr_id |
|
475 | 475 | self.user_id = u_id |
|
476 | 476 | |
|
477 | 477 | |
|
478 | 478 | class RepositoryField(Base, BaseModel): |
|
479 | 479 | __tablename__ = 'repositories_fields' |
|
480 | 480 | __table_args__ = ( |
|
481 | 481 | UniqueConstraint('repository_id', 'field_key'), # no-multi field |
|
482 | 482 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
483 | 483 | 'mysql_charset': 'utf8'}, |
|
484 | 484 | ) |
|
485 | 485 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields |
|
486 | 486 | |
|
487 | 487 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
488 | 488 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
489 | 489 | field_key = Column("field_key", String(250)) |
|
490 | 490 | field_label = Column("field_label", String(1024), nullable=False) |
|
491 | 491 | field_value = Column("field_value", String(10000), nullable=False) |
|
492 | 492 | field_desc = Column("field_desc", String(1024), nullable=False) |
|
493 | 493 | field_type = Column("field_type", String(256), nullable=False, unique=None) |
|
494 | 494 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
495 | 495 | |
|
496 | 496 | repository = relationship('Repository') |
|
497 | 497 | |
|
498 | 498 | @classmethod |
|
499 | 499 | def get_by_key_name(cls, key, repo): |
|
500 | 500 | row = cls.query()\ |
|
501 | 501 | .filter(cls.repository == repo)\ |
|
502 | 502 | .filter(cls.field_key == key).scalar() |
|
503 | 503 | return row |
|
504 | 504 | |
|
505 | 505 | |
|
506 | 506 | class Repository(Base, BaseModel): |
|
507 | 507 | __tablename__ = 'repositories' |
|
508 | 508 | __table_args__ = ( |
|
509 | 509 | UniqueConstraint('repo_name'), |
|
510 | 510 | Index('r_repo_name_idx', 'repo_name'), |
|
511 | 511 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
512 | 512 | 'mysql_charset': 'utf8'}, |
|
513 | 513 | ) |
|
514 | 514 | |
|
515 | 515 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
516 | 516 | repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None) |
|
517 | 517 | clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None) |
|
518 | 518 | repo_type = Column("repo_type", String(255), nullable=False, unique=False, default=None) |
|
519 | 519 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
520 | 520 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
521 | 521 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
522 | 522 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
523 | 523 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
524 | 524 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
525 | 525 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
526 | 526 | landing_rev = Column("landing_revision", String(255), nullable=False, unique=False, default=None) |
|
527 | 527 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
528 | 528 | _locked = Column("locked", String(255), nullable=True, unique=False, default=None) |
|
529 | 529 | _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data |
|
530 | 530 | |
|
531 | 531 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
532 | 532 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
533 | 533 | |
|
534 | 534 | user = relationship('User') |
|
535 | 535 | fork = relationship('Repository', remote_side=repo_id) |
|
536 | 536 | group = relationship('RepoGroup') |
|
537 | 537 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
538 | 538 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
539 | 539 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
540 | 540 | |
|
541 | 541 | followers = relationship('UserFollowing', |
|
542 | 542 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', |
|
543 | 543 | cascade='all') |
|
544 | 544 | extra_fields = relationship('RepositoryField', |
|
545 | 545 | cascade="all, delete, delete-orphan") |
|
546 | 546 | |
|
547 | 547 | logs = relationship('UserLog') |
|
548 | 548 | comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan") |
|
549 | 549 | |
|
550 | 550 | pull_requests_org = relationship('PullRequest', |
|
551 | 551 | primaryjoin='PullRequest.org_repo_id==Repository.repo_id', |
|
552 | 552 | cascade="all, delete, delete-orphan") |
|
553 | 553 | |
|
554 | 554 | pull_requests_other = relationship('PullRequest', |
|
555 | 555 | primaryjoin='PullRequest.other_repo_id==Repository.repo_id', |
|
556 | 556 | cascade="all, delete, delete-orphan") |
|
557 | 557 | |
|
558 | 558 | def __unicode__(self): |
|
559 | 559 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, |
|
560 | 560 | safe_unicode(self.repo_name)) |
|
561 | 561 | |
|
562 | 562 | #NOTE for this migration we are required tio have it |
|
563 | 563 | @hybrid_property |
|
564 | 564 | def changeset_cache(self): |
|
565 | 565 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
566 | 566 | dummy = EmptyCommit().__json__() |
|
567 | 567 | if not self._changeset_cache: |
|
568 | 568 | return dummy |
|
569 | 569 | try: |
|
570 | 570 | return json.loads(self._changeset_cache) |
|
571 | 571 | except TypeError: |
|
572 | 572 | return dummy |
|
573 | 573 | |
|
574 | 574 | @changeset_cache.setter |
|
575 | 575 | def changeset_cache(self, val): |
|
576 | 576 | try: |
|
577 | 577 | self._changeset_cache = json.dumps(val) |
|
578 | 578 | except Exception: |
|
579 | 579 | log.error(traceback.format_exc()) |
|
580 | 580 | |
|
581 | 581 | @classmethod |
|
582 | 582 | def get_by_repo_name(cls, repo_name): |
|
583 | 583 | q = Session().query(cls).filter(cls.repo_name == repo_name) |
|
584 | 584 | q = q.options(joinedload(Repository.fork))\ |
|
585 | 585 | .options(joinedload(Repository.user))\ |
|
586 | 586 | .options(joinedload(Repository.group)) |
|
587 | 587 | return q.scalar() |
|
588 | 588 | |
|
589 | 589 | #NOTE this is required for this migration to work |
|
590 | 590 | def update_commit_cache(self, cs_cache=None): |
|
591 | 591 | """ |
|
592 | 592 | Update cache of last changeset for repository, keys should be:: |
|
593 | 593 | |
|
594 | 594 | short_id |
|
595 | 595 | raw_id |
|
596 | 596 | revision |
|
597 | 597 | message |
|
598 | 598 | date |
|
599 | 599 | author |
|
600 | 600 | |
|
601 | 601 | :param cs_cache: |
|
602 | 602 | """ |
|
603 | 603 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
604 | 604 | if cs_cache is None: |
|
605 | 605 | cs_cache = EmptyCommit() |
|
606 | 606 | # Note: Using always the empty commit here in case we are |
|
607 | 607 | # upgrading towards version 3.0 and above. Reason is that in this |
|
608 | 608 | # case the vcsclient connection is not available and things |
|
609 | 609 | # would explode here. |
|
610 | 610 | |
|
611 | 611 | if isinstance(cs_cache, BaseChangeset): |
|
612 | 612 | cs_cache = cs_cache.__json__() |
|
613 | 613 | |
|
614 | 614 | if (cs_cache != self.changeset_cache or not self.changeset_cache): |
|
615 | 615 | _default = datetime.datetime.fromtimestamp(0) |
|
616 | 616 | last_change = cs_cache.get('date') or _default |
|
617 |
log.debug('updated repo %s with new c |
|
|
617 | log.debug('updated repo %s with new commit cache %s', self.repo_name, cs_cache) | |
|
618 | 618 | self.updated_on = last_change |
|
619 | 619 | self.changeset_cache = cs_cache |
|
620 | 620 | Session().add(self) |
|
621 | 621 | Session().commit() |
|
622 | 622 | else: |
|
623 | 623 | log.debug('Skipping repo:%s already with latest changes', self.repo_name) |
|
624 | 624 | |
|
class RepoGroup(Base, BaseModel):
    """Repository group: a folder-like container that can hold repositories
    and nest other groups via the self-referential ``group_parent_id``."""
    __tablename__ = 'groups'
    __table_args__ = (
        # A (name, parent) pair identifies a group within its parent folder.
        UniqueConstraint('group_name', 'group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    # Default ordering applied to queries over this mapper.
    __mapper_args__ = {'order_by': 'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # Full group path; unique=True makes the complete name globally unique.
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    # Self-referential FK to the parent group (NULL for top-level groups).
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)

    # Per-user and per-user-group permission grants on this group.
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    # Parent side of the self-referential hierarchy.
    parent_group = relationship('RepoGroup', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        # Python 2 unicode representation used for debugging/logging.
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                   self.group_name)

    @classmethod
    def url_sep(cls):
        """Return the path separator used in group names/URLs (module-level
        ``URL_SEP`` constant defined elsewhere in this file)."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look up a single group by its full name.

        :param group_name: full group path to match
        :param cache: when True, route the query through the
            ``sql_cache_short`` query-cache region
        :param case_insensitive: when True, match with SQL ``ILIKE``
        :return: the matching ``RepoGroup`` or ``None``
        """
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.group_name == group_name)
        if cache:
            # Cache key incorporates a hash of the name so each group gets
            # its own cache slot.
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)
            )
            )
        return gr.scalar()
|
671 | 671 | |
|
672 | 672 | |
|
class Permission(Base, BaseModel):
    """Catalog of all permission names known to the application.

    Rows of this table are referenced by the various ``*ToPerm`` association
    tables; ``PERMS`` lists the seedable permission names with their
    human-readable (translated) descriptions.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    # (permission_name, translated description) pairs.
    PERMS = [
        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('hg.admin', _('RhodeCode Administrator')),
        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),
        ('hg.register.none', _('Register disabled')),
        ('hg.register.manual_activate', _('Register new user with RhodeCode '
                                          'with manual activation')),

        ('hg.register.auto_activate', _('Register new user with RhodeCode '
                                        'with auto activation')),
    ]

    # Relative strength of each permission: the higher the weight, the more
    # powerful the permission (used when resolving conflicting grants).
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository':1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        # Python 2 unicode representation used for debugging/logging.
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the ``Permission`` row whose name equals *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()
|
734 | 734 | |
|
735 | 735 | |
|
class UserRepoToPerm(Base, BaseModel):
    """Association table granting a single user a permission on a repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        # One permission row per (user, repository, permission) triple.
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    def __unicode__(self):
        # Python 2 unicode representation used for debugging/logging.
        return u'<user:%s => %s >' % (self.user, self.repository)
|
754 | 754 | |
|
755 | 755 | |
|
class UserToPerm(Base, BaseModel):
    """Association table granting a single user a global permission."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        # A user can hold each permission at most once.
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # Eager-loaded: the permission is almost always read together with the row.
    permission = relationship('Permission', lazy='joined')
|
769 | 769 | |
|
770 | 770 | |
|
class UserGroupRepoToPerm(Base, BaseModel):
    """Association table granting a user group a permission on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        # One permission row per (repository, user group, permission) triple.
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    def __unicode__(self):
        # Python 2 unicode representation used for debugging/logging.
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
|
789 | 789 | |
|
790 | 790 | |
|
class UserGroupToPerm(Base, BaseModel):
    """Association table granting a user group a global permission."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        # A user group can hold each permission at most once.
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
|
804 | 804 | |
|
805 | 805 | |
|
class UserRepoGroupToPerm(Base, BaseModel):
    """Association table granting a single user a permission on a repository
    group (``RepoGroup``)."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        # One permission row per (user, group, permission) triple.
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')
|
822 | 822 | |
|
823 | 823 | |
|
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Association table granting a user group a permission on a repository
    group (``RepoGroup``)."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *ToPerm tables, this constraint
        # omits permission_id, so a user group can hold only ONE permission
        # per repository group — confirm this asymmetry is intentional.
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')
|
840 | 840 | |
|
841 | 841 | |
|
class Statistics(Base, BaseModel):
    """Per-repository commit-activity statistics (one row per repository)."""
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # Revision up to which statistics have been computed.
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # serialized JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # serialized JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # serialized JSON data

    repository = relationship('Repository', single_parent=True)
|
857 | 857 | |
|
858 | 858 | |
|
class UserFollowing(Base, BaseModel):
    """Records a user following either a repository or another user.

    Exactly one of ``follows_repo_id`` / ``follows_user_id`` is expected to
    be set per row (both columns are individually nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        # A user may follow a given repository or user only once.
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # Explicit primaryjoins disambiguate the two FKs into the users table.
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')
|
878 | 878 | |
|
879 | 879 | |
|
class CacheInvalidation(Base, BaseModel):
    """Bookkeeping row used to mark cached entries as valid or invalidated."""
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    # False means the cached entry is stale and must be recomputed.
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        # New entries start inactive until the cache is (re)populated.
        self.cache_active = False
|
897 | 897 | |
|
898 | 898 | |
|
class ChangesetComment(Base, BaseModel):
    """Comment left on a changeset or on a pull request.

    A comment is attached either to a changeset (``revision`` set) or to a
    pull request (``pull_request_id`` set); inline comments additionally
    carry ``line_no``/``f_path`` locating them in a file diff.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # Changeset hash this comment targets (NULL for pull-request comments).
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    # Diff position for inline comments (NULL for general comments).
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Return the ``User`` rows that commented on the given changeset
        revision or pull request (whichever filter is supplied).

        :param revision: changeset hash to filter comments by
        :param pull_request_id: pull request id to filter comments by
        :return: list of ``User`` objects
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()
|
939 | 939 | |
|
940 | 940 | |
|
class ChangesetStatus(Base, BaseModel):
    """Review status of a changeset (or of a changeset within a pull
    request), versioned so that status history is preserved."""
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        # One status row per (repo, revision, version) — new statuses for the
        # same revision get a new version number.
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    # Allowed status values; STATUS_NOT_REVIEWED doubles as the default.
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs for UI display.
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    # Optional link to the comment that carried the status change.
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    # Monotonic counter distinguishing successive statuses on one revision.
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
|
976 | 976 | |
|
977 | 977 | |
|
978 | 978 | |
|
class PullRequest(Base, BaseModel):
    """Pull request from an origin repository/ref to a target
    repository/ref, with its reviewers, comments and review statuses."""
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    # Lifecycle states stored in the ``status`` column.
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
    title = Column('title', Unicode(256), nullable=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    # Serialized list of revisions included in this pull request
    # (underscore prefix suggests access goes through a property elsewhere
    # in the file — TODO confirm).
    _revisions = Column('revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
    # Origin ("org") side: where the changes come from.
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    org_ref = Column('org_ref', Unicode(256), nullable=False)
    # Target ("other") side: where the changes should land.
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    other_ref = Column('other_ref', Unicode(256), nullable=False)

    author = relationship('User', lazy='joined')
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    # Explicit primaryjoins disambiguate the two FKs into repositories.
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
|
1011 | 1011 | |
|
1012 | 1012 | |
|
class PullRequestReviewers(Base, BaseModel):
    """Association table linking reviewers (users) to a pull request."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    def __init__(self, user=None, pull_request=None):
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)

    user = relationship('User')
    pull_request = relationship('PullRequest')
|
1030 | 1030 | |
|
1031 | 1031 | |
|
class Notification(Base, BaseModel):
    """A notification message, fanned out to recipients through
    ``UserNotification`` association rows."""
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    # Known notification types stored in the ``type`` column.
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # Trailing underscore avoids shadowing the ``type`` builtin; DB column
    # is still named 'type'.
    type_ = Column('type', Unicode(256))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")
|
1057 | 1057 | |
|
1058 | 1058 | |
|
class UserNotification(Base, BaseModel):
    """Per-recipient delivery record of a ``Notification`` (composite PK of
    user and notification), tracking read state and send time."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    # Whether the recipient has read the notification.
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    # Newest notifications first when traversing the relationship.
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)
|
1074 | 1074 | |
|
1075 | 1075 | |
|
class DbMigrateVersion(Base, BaseModel):
    """Schema-version bookkeeping table used by the database migration
    machinery to record which migration version a repository path is at."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now