@@ -1,385 +1,392 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pylons |
|
22 | 22 | import deform |
|
23 | 23 | import logging |
|
24 | 24 | import colander |
|
25 | 25 | import peppercorn |
|
26 | 26 | import webhelpers.paginate |
|
27 | 27 | |
|
28 | 28 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest |
|
29 | 29 | from pyramid.renderers import render |
|
30 | 30 | from pyramid.response import Response |
|
31 | 31 | |
|
32 | 32 | from rhodecode.lib import auth |
|
33 | 33 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
34 | 34 | from rhodecode.lib.utils2 import safe_int |
|
35 | 35 | from rhodecode.lib.helpers import Page |
|
36 | 36 | from rhodecode.model.db import Repository, RepoGroup, Session, Integration |
|
37 | 37 | from rhodecode.model.scm import ScmModel |
|
38 | 38 | from rhodecode.model.integration import IntegrationModel |
|
39 | 39 | from rhodecode.admin.navigation import navigation_list |
|
40 | 40 | from rhodecode.translation import _ |
|
41 | 41 | from rhodecode.integrations import integration_type_registry |
|
42 | 42 | from rhodecode.model.validation_schema.schemas.integration_schema import ( |
|
43 | make_integration_schema) | |
|
43 | make_integration_schema, IntegrationScopeType) | |
|
44 | 44 | |
|
45 | 45 | log = logging.getLogger(__name__) |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | class IntegrationSettingsViewBase(object): |
|
49 | 49 | """ Base Integration settings view used by both repo / global settings """ |
|
50 | 50 | |
|
51 | 51 | def __init__(self, context, request): |
|
52 | 52 | self.context = context |
|
53 | 53 | self.request = request |
|
54 | 54 | self._load_general_context() |
|
55 | 55 | |
|
56 | 56 | if not self.perm_check(request.user): |
|
57 | 57 | raise HTTPForbidden() |
|
58 | 58 | |
|
59 | 59 | def _load_general_context(self): |
|
60 | 60 | """ |
|
61 | 61 | This avoids boilerplate for repo/global+list/edit+views/templates |
|
62 | 62 | by doing all possible contexts at the same time however it should |
|
63 | 63 | be split up into separate functions once more "contexts" exist |
|
64 | 64 | """ |
|
65 | 65 | |
|
66 | 66 | self.IntegrationType = None |
|
67 | 67 | self.repo = None |
|
68 | 68 | self.repo_group = None |
|
69 | 69 | self.integration = None |
|
70 | 70 | self.integrations = {} |
|
71 | 71 | |
|
72 | 72 | request = self.request |
|
73 | 73 | |
|
74 | 74 | if 'repo_name' in request.matchdict: # in repo settings context |
|
75 | 75 | repo_name = request.matchdict['repo_name'] |
|
76 | 76 | self.repo = Repository.get_by_repo_name(repo_name) |
|
77 | 77 | |
|
78 | 78 | if 'repo_group_name' in request.matchdict: # in group settings context |
|
79 | 79 | repo_group_name = request.matchdict['repo_group_name'] |
|
80 | 80 | self.repo_group = RepoGroup.get_by_group_name(repo_group_name) |
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | if 'integration' in request.matchdict: # integration type context |
|
84 | 84 | integration_type = request.matchdict['integration'] |
|
85 | 85 | self.IntegrationType = integration_type_registry[integration_type] |
|
86 | 86 | |
|
87 | 87 | if 'integration_id' in request.matchdict: # single integration context |
|
88 | 88 | integration_id = request.matchdict['integration_id'] |
|
89 | 89 | self.integration = Integration.get(integration_id) |
|
90 | 90 | |
|
91 | 91 | # extra perms check just in case |
|
92 | 92 | if not self._has_perms_for_integration(self.integration): |
|
93 | 93 | raise HTTPForbidden() |
|
94 | 94 | |
|
95 | 95 | self.settings = self.integration and self.integration.settings or {} |
|
96 | 96 | self.admin_view = not (self.repo or self.repo_group) |
|
97 | 97 | |
|
98 | 98 | def _has_perms_for_integration(self, integration): |
|
99 | 99 | perms = self.request.user.permissions |
|
100 | 100 | |
|
101 | 101 | if 'hg.admin' in perms['global']: |
|
102 | 102 | return True |
|
103 | 103 | |
|
104 | 104 | if integration.repo: |
|
105 | 105 | return perms['repositories'].get( |
|
106 | 106 | integration.repo.repo_name) == 'repository.admin' |
|
107 | 107 | |
|
108 | 108 | if integration.repo_group: |
|
109 | 109 | return perms['repositories_groups'].get( |
|
110 | 110 | integration.repo_group.group_name) == 'group.admin' |
|
111 | 111 | |
|
112 | 112 | return False |
|
113 | 113 | |
|
114 | 114 | def _template_c_context(self): |
|
115 | 115 | # TODO: dan: this is a stopgap in order to inherit from current pylons |
|
116 | 116 | # based admin/repo settings templates - this should be removed entirely |
|
117 | 117 | # after port to pyramid |
|
118 | 118 | |
|
119 | 119 | c = pylons.tmpl_context |
|
120 | 120 | c.active = 'integrations' |
|
121 | 121 | c.rhodecode_user = self.request.user |
|
122 | 122 | c.repo = self.repo |
|
123 | 123 | c.repo_group = self.repo_group |
|
124 | 124 | c.repo_name = self.repo and self.repo.repo_name or None |
|
125 | 125 | c.repo_group_name = self.repo_group and self.repo_group.group_name or None |
|
126 | 126 | |
|
127 | 127 | if self.repo: |
|
128 | 128 | c.repo_info = self.repo |
|
129 | 129 | c.rhodecode_db_repo = self.repo |
|
130 | 130 | c.repository_pull_requests = ScmModel().get_pull_requests(self.repo) |
|
131 | 131 | else: |
|
132 | 132 | c.navlist = navigation_list(self.request) |
|
133 | 133 | |
|
134 | 134 | return c |
|
135 | 135 | |
|
136 | 136 | def _form_schema(self): |
|
137 | 137 | schema = make_integration_schema(IntegrationType=self.IntegrationType, |
|
138 | 138 | settings=self.settings) |
|
139 | 139 | |
|
140 | 140 | # returns a clone, important if mutating the schema later |
|
141 | 141 | return schema.bind( |
|
142 | 142 | permissions=self.request.user.permissions, |
|
143 | 143 | no_scope=not self.admin_view) |
|
144 | 144 | |
|
145 | 145 | |
|
146 | 146 | def _form_defaults(self): |
|
147 | 147 | defaults = {} |
|
148 | 148 | |
|
149 | 149 | if self.integration: |
|
150 | 150 | defaults['settings'] = self.integration.settings or {} |
|
151 | 151 | defaults['options'] = { |
|
152 | 152 | 'name': self.integration.name, |
|
153 | 153 | 'enabled': self.integration.enabled, |
|
154 | 'scope': | |

154 | 'scope': { | |
|
155 | 'repo': self.integration.repo, | |
|
156 | 'repo_group': self.integration.repo_group, | |
|
157 | 'child_repos_only': self.integration.child_repos_only, | |
|
158 | }, | |
|
155 | 159 | } |
|
156 | 160 | else: |
|
157 | 161 | if self.repo: |
|
158 | 162 | scope = _('{repo_name} repository').format( |
|
159 | 163 | repo_name=self.repo.repo_name) |
|
160 | 164 | elif self.repo_group: |
|
161 | 165 | scope = _('{repo_group_name} repo group').format( |
|
162 | 166 | repo_group_name=self.repo_group.group_name) |
|
163 | 167 | else: |
|
164 | 168 | scope = _('Global') |
|
165 | 169 | |
|
166 | 170 | defaults['options'] = { |
|
167 | 171 | 'enabled': True, |
|
168 | 172 | 'name': _('{name} integration').format( |
|
169 | 173 | name=self.IntegrationType.display_name), |
|
170 | 174 | } |
|
171 | if self.repo: | |
|
172 |

173 |

174 | defaults['options']['scope'] = self.repo_group | |
|
175 | defaults['options']['scope'] = { | |
|
176 | 'repo': self.repo, | |
|
177 | 'repo_group': self.repo_group, | |
|
178 | } | |
|
175 | 179 | |
|
176 | 180 | return defaults |
|
177 | 181 | |
|
178 | 182 | def _delete_integration(self, integration): |
|
179 | 183 | Session().delete(self.integration) |
|
180 | 184 | Session().commit() |
|
181 | 185 | self.request.session.flash( |
|
182 | 186 | _('Integration {integration_name} deleted successfully.').format( |
|
183 | 187 | integration_name=self.integration.name), |
|
184 | 188 | queue='success') |
|
185 | 189 | |
|
186 | 190 | if self.repo: |
|
187 | 191 | redirect_to = self.request.route_url( |
|
188 | 192 | 'repo_integrations_home', repo_name=self.repo.repo_name) |
|
189 | 193 | elif self.repo_group: |
|
190 | 194 | redirect_to = self.request.route_url( |
|
191 | 195 | 'repo_group_integrations_home', |
|
192 | 196 | repo_group_name=self.repo_group.group_name) |
|
193 | 197 | else: |
|
194 | 198 | redirect_to = self.request.route_url('global_integrations_home') |
|
195 | 199 | raise HTTPFound(redirect_to) |
|
196 | 200 | |
|
197 | 201 | def settings_get(self, defaults=None, form=None): |
|
198 | 202 | """ |
|
199 | 203 | View that displays the integration settings as a form. |
|
200 | 204 | """ |
|
201 | 205 | |
|
202 | 206 | defaults = defaults or self._form_defaults() |
|
203 | 207 | schema = self._form_schema() |
|
204 | 208 | |
|
205 | 209 | if self.integration: |
|
206 | 210 | buttons = ('submit', 'delete') |
|
207 | 211 | else: |
|
208 | 212 | buttons = ('submit',) |
|
209 | 213 | |
|
210 | 214 | form = form or deform.Form(schema, appstruct=defaults, buttons=buttons) |
|
211 | 215 | |
|
212 | 216 | template_context = { |
|
213 | 217 | 'form': form, |
|
214 | 218 | 'current_IntegrationType': self.IntegrationType, |
|
215 | 219 | 'integration': self.integration, |
|
216 | 220 | 'c': self._template_c_context(), |
|
217 | 221 | } |
|
218 | 222 | |
|
219 | 223 | return template_context |
|
220 | 224 | |
|
221 | 225 | @auth.CSRFRequired() |
|
222 | 226 | def settings_post(self): |
|
223 | 227 | """ |
|
224 | 228 | View that validates and stores the integration settings. |
|
225 | 229 | """ |
|
226 | 230 | controls = self.request.POST.items() |
|
227 | 231 | pstruct = peppercorn.parse(controls) |
|
228 | 232 | |
|
229 | 233 | if self.integration and pstruct.get('delete'): |
|
230 | 234 | return self._delete_integration(self.integration) |
|
231 | 235 | |
|
232 | 236 | schema = self._form_schema() |
|
233 | 237 | |
|
234 | 238 | skip_settings_validation = False |
|
235 | 239 | if self.integration and 'enabled' not in pstruct.get('options', {}): |
|
236 | 240 | skip_settings_validation = True |
|
237 | 241 | schema['settings'].validator = None |
|
238 | 242 | for field in schema['settings'].children: |
|
239 | 243 | field.validator = None |
|
240 | 244 | field.missing = '' |
|
241 | 245 | |
|
242 | 246 | if self.integration: |
|
243 | 247 | buttons = ('submit', 'delete') |
|
244 | 248 | else: |
|
245 | 249 | buttons = ('submit',) |
|
246 | 250 | |
|
247 | 251 | form = deform.Form(schema, buttons=buttons) |
|
248 | 252 | |
|
249 | 253 | if not self.admin_view: |
|
250 | 254 | # scope is read only field in these cases, and has to be added |
|
251 | 255 | options = pstruct.setdefault('options', {}) |
|
252 | 256 | if 'scope' not in options: |
|
253 | if self.repo: | |
|
254 |

255 |

256 | options['scope'] = 'repogroup:{}'.format( | |
|
257 | self.repo_group.group_name) | |
|
257 | options['scope'] = IntegrationScopeType().serialize(None, { | |
|
258 | 'repo': self.repo, | |
|
259 | 'repo_group': self.repo_group, | |
|
260 | }) | |
|
258 | 261 | |
|
259 | 262 | try: |
|
260 | 263 | valid_data = form.validate_pstruct(pstruct) |
|
261 | 264 | except deform.ValidationFailure as e: |
|
262 | 265 | self.request.session.flash( |
|
263 | 266 | _('Errors exist when saving integration settings. ' |
|
264 | 267 | 'Please check the form inputs.'), |
|
265 | 268 | queue='error') |
|
266 | 269 | return self.settings_get(form=e) |
|
267 | 270 | |
|
268 | 271 | if not self.integration: |
|
269 | 272 | self.integration = Integration() |
|
270 | 273 | self.integration.integration_type = self.IntegrationType.key |
|
271 | 274 | Session().add(self.integration) |
|
272 | 275 | |
|
273 | 276 | scope = valid_data['options']['scope'] |
|
274 | 277 | |
|
275 | 278 | IntegrationModel().update_integration(self.integration, |
|
276 | 279 | name=valid_data['options']['name'], |
|
277 | 280 | enabled=valid_data['options']['enabled'], |
|
278 | 281 | settings=valid_data['settings'], |
|
279 |

282 | repo=scope['repo'], | |
|
283 | repo_group=scope['repo_group'], | |
|
284 | child_repos_only=scope['child_repos_only'], | |
|
285 | ) | |
|
286 | ||
|
280 | 287 | |
|
281 | 288 | self.integration.settings = valid_data['settings'] |
|
282 | 289 | Session().commit() |
|
283 | 290 | # Display success message and redirect. |
|
284 | 291 | self.request.session.flash( |
|
285 | 292 | _('Integration {integration_name} updated successfully.').format( |
|
286 | 293 | integration_name=self.IntegrationType.display_name), |
|
287 | 294 | queue='success') |
|
288 | 295 | |
|
289 | 296 | |
|
290 | 297 | # if integration scope changes, we must redirect to the right place |
|
291 | 298 | # keeping in mind if the original view was for /repo/ or /_admin/ |
|
292 | 299 | admin_view = not (self.repo or self.repo_group) |
|
293 | 300 | |
|
294 | if | |

301 | if self.integration.repo and not admin_view: | |
|
295 | 302 | redirect_to = self.request.route_path( |
|
296 | 303 | 'repo_integrations_edit', |
|
297 | repo_name=self.integration. | |

304 | repo_name=self.integration.repo.repo_name, | |
|
298 | 305 | integration=self.integration.integration_type, |
|
299 | 306 | integration_id=self.integration.integration_id) |
|
300 | elif | |

307 | elif self.integration.repo_group and not admin_view: | |
|
301 | 308 | redirect_to = self.request.route_path( |
|
302 | 309 | 'repo_group_integrations_edit', |
|
303 | repo_group_name=self.integration. | |

310 | repo_group_name=self.integration.repo_group.group_name, | |
|
304 | 311 | integration=self.integration.integration_type, |
|
305 | 312 | integration_id=self.integration.integration_id) |
|
306 | 313 | else: |
|
307 | 314 | redirect_to = self.request.route_path( |
|
308 | 315 | 'global_integrations_edit', |
|
309 | 316 | integration=self.integration.integration_type, |
|
310 | 317 | integration_id=self.integration.integration_id) |
|
311 | 318 | |
|
312 | 319 | return HTTPFound(redirect_to) |
|
313 | 320 | |
|
314 | 321 | def index(self): |
|
315 | 322 | """ List integrations """ |
|
316 | 323 | if self.repo: |
|
317 | 324 | scope = self.repo |
|
318 | 325 | elif self.repo_group: |
|
319 | 326 | scope = self.repo_group |
|
320 | 327 | else: |
|
321 | 328 | scope = 'all' |
|
322 | 329 | |
|
323 | 330 | integrations = [] |
|
324 | 331 | |
|
325 | 332 | for integration in IntegrationModel().get_integrations( |
|
326 | 333 | scope=scope, IntegrationType=self.IntegrationType): |
|
327 | 334 | |
|
328 | 335 | # extra permissions check *just in case* |
|
329 | 336 | if not self._has_perms_for_integration(integration): |
|
330 | 337 | continue |
|
331 | 338 | integrations.append(integration) |
|
332 | 339 | |
|
333 | 340 | sort_arg = self.request.GET.get('sort', 'name:asc') |
|
334 | 341 | if ':' in sort_arg: |
|
335 | 342 | sort_field, sort_dir = sort_arg.split(':') |
|
336 | 343 | else: |
|
337 | 344 | sort_field, sort_dir = sort_arg, 'asc' |
|
338 | 345 | |
|
339 | 346 | assert sort_field in ('name', 'integration_type', 'enabled', 'scope') |
|
340 | 347 | |
|
341 | 348 | integrations.sort( |
|
342 | 349 | key=lambda x: getattr(x[1], sort_field), reverse=(sort_dir=='desc')) |
|
343 | 350 | |
|
344 | 351 | |
|
345 | 352 | page_url = webhelpers.paginate.PageURL( |
|
346 | 353 | self.request.path, self.request.GET) |
|
347 | 354 | page = safe_int(self.request.GET.get('page', 1), 1) |
|
348 | 355 | |
|
349 | 356 | integrations = Page(integrations, page=page, items_per_page=10, |
|
350 | 357 | url=page_url) |
|
351 | 358 | |
|
352 | 359 | template_context = { |
|
353 | 360 | 'sort_field': sort_field, |
|
354 | 361 | 'rev_sort_dir': sort_dir != 'desc' and 'desc' or 'asc', |
|
355 | 362 | 'current_IntegrationType': self.IntegrationType, |
|
356 | 363 | 'integrations_list': integrations, |
|
357 | 364 | 'available_integrations': integration_type_registry, |
|
358 | 365 | 'c': self._template_c_context(), |
|
359 | 366 | 'request': self.request, |
|
360 | 367 | } |
|
361 | 368 | return template_context |
|
362 | 369 | |
|
363 | 370 | def new_integration(self): |
|
364 | 371 | template_context = { |
|
365 | 372 | 'available_integrations': integration_type_registry, |
|
366 | 373 | 'c': self._template_c_context(), |
|
367 | 374 | } |
|
368 | 375 | return template_context |
|
369 | 376 | |
|
370 | 377 | class GlobalIntegrationsView(IntegrationSettingsViewBase): |
|
371 | 378 | def perm_check(self, user): |
|
372 | 379 | return auth.HasPermissionAll('hg.admin').check_permissions(user=user) |
|
373 | 380 | |
|
374 | 381 | |
|
375 | 382 | class RepoIntegrationsView(IntegrationSettingsViewBase): |
|
376 | 383 | def perm_check(self, user): |
|
377 | 384 | return auth.HasRepoPermissionAll('repository.admin' |
|
378 | 385 | )(repo_name=self.repo.repo_name, user=user) |
|
379 | 386 | |
|
380 | 387 | |
|
381 | 388 | class RepoGroupIntegrationsView(IntegrationSettingsViewBase): |
|
382 | 389 | def perm_check(self, user): |
|
383 | 390 | return auth.HasRepoGroupPermissionAll('group.admin' |
|
384 | 391 | )(group_name=self.repo_group.group_name, user=user) |
|
385 | 392 |
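
Note on the file above: the new code stops hand-formatting scope strings such as 'repogroup:{}'.format(...) and instead builds a scope dict ({'repo': ..., 'repo_group': ..., 'child_repos_only': ...}) that is serialized through IntegrationScopeType().serialize(None, {...}) and unpacked again when calling IntegrationModel().update_integration(). As a rough illustration only (the real IntegrationScopeType lives in rhodecode/model/validation_schema/schemas/integration_schema.py, which is not part of this diff, so the string format below is an assumption), a colander type with a serialize() of that shape could look like:

    import colander

    class IntegrationScopeType(colander.SchemaType):
        # Hypothetical sketch; the shipped implementation may differ.
        def serialize(self, node, appstruct):
            if appstruct is colander.null:
                return colander.null
            # Flatten the scope dict built by the view into the single
            # string value a read-only form widget can display.
            if appstruct.get('repo'):
                return 'repo:%s' % appstruct['repo'].repo_name
            if appstruct.get('repo_group'):
                return 'repogroup:%s' % appstruct['repo_group'].group_name
            return 'global'
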
@@ -1,3534 +1,3516 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Database Models for RhodeCode Enterprise |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import sys |
|
27 | 27 | import time |
|
28 | 28 | import hashlib |
|
29 | 29 | import logging |
|
30 | 30 | import datetime |
|
31 | 31 | import warnings |
|
32 | 32 | import ipaddress |
|
33 | 33 | import functools |
|
34 | 34 | import traceback |
|
35 | 35 | import collections |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | from sqlalchemy import * |
|
39 | 39 | from sqlalchemy.exc import IntegrityError |
|
40 | 40 | from sqlalchemy.ext.declarative import declared_attr |
|
41 | 41 | from sqlalchemy.ext.hybrid import hybrid_property |
|
42 | 42 | from sqlalchemy.orm import ( |
|
43 | 43 | relationship, joinedload, class_mapper, validates, aliased) |
|
44 | 44 | from sqlalchemy.sql.expression import true |
|
45 | 45 | from beaker.cache import cache_region, region_invalidate |
|
46 | 46 | from webob.exc import HTTPNotFound |
|
47 | 47 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
48 | 48 | |
|
49 | 49 | from pylons import url |
|
50 | 50 | from pylons.i18n.translation import lazy_ugettext as _ |
|
51 | 51 | |
|
52 | 52 | from rhodecode.lib.vcs import get_backend, get_vcs_instance |
|
53 | 53 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
54 | 54 | from rhodecode.lib.vcs.exceptions import VCSError |
|
55 | 55 | from rhodecode.lib.vcs.backends.base import ( |
|
56 | 56 | EmptyCommit, Reference, MergeFailureReason) |
|
57 | 57 | from rhodecode.lib.utils2 import ( |
|
58 | 58 | str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe, |
|
59 | 59 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict) |
|
60 | 60 | from rhodecode.lib.jsonalchemy import MutationObj, JsonType, JSONDict |
|
61 | 61 | from rhodecode.lib.ext_json import json |
|
62 | 62 | from rhodecode.lib.caching_query import FromCache |
|
63 | 63 | from rhodecode.lib.encrypt import AESCipher |
|
64 | 64 | |
|
65 | 65 | from rhodecode.model.meta import Base, Session |
|
66 | 66 | |
|
67 | 67 | URL_SEP = '/' |
|
68 | 68 | log = logging.getLogger(__name__) |
|
69 | 69 | |
|
70 | 70 | # ============================================================================= |
|
71 | 71 | # BASE CLASSES |
|
72 | 72 | # ============================================================================= |
|
73 | 73 | |
|
74 | 74 | # this is propagated from .ini file rhodecode.encrypted_values.secret or |
|
75 | 75 | # beaker.session.secret if first is not set. |
|
76 | 76 | # and initialized at environment.py |
|
77 | 77 | ENCRYPTION_KEY = None |
|
78 | 78 | |
|
79 | 79 | # used to sort permissions by types, '#' used here is not allowed to be in |
|
80 | 80 | # usernames, and it's very early in sorted string.printable table. |
|
81 | 81 | PERMISSION_TYPE_SORT = { |
|
82 | 82 | 'admin': '####', |
|
83 | 83 | 'write': '###', |
|
84 | 84 | 'read': '##', |
|
85 | 85 | 'none': '#', |
|
86 | 86 | } |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def display_sort(obj): |
|
90 | 90 | """ |
|
91 | 91 | Sort function used to sort permissions in .permissions() function of |
|
92 | 92 | Repository, RepoGroup, UserGroup. Also it put the default user in front |
|
93 | 93 | of all other resources |
|
94 | 94 | """ |
|
95 | 95 | |
|
96 | 96 | if obj.username == User.DEFAULT_USER: |
|
97 | 97 | return '#####' |
|
98 | 98 | prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') |
|
99 | 99 | return prefix + obj.username |
|
100 | 100 | |
|
101 | 101 | |
|
102 | 102 | def _hash_key(k): |
|
103 | 103 | return md5_safe(k) |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | class EncryptedTextValue(TypeDecorator): |
|
107 | 107 | """ |
|
108 | 108 | Special column for encrypted long text data, use like:: |
|
109 | 109 | |
|
110 | 110 | value = Column("encrypted_value", EncryptedValue(), nullable=False) |
|
111 | 111 | |
|
112 | 112 | This column is intelligent so if value is in unencrypted form it return |
|
113 | 113 | unencrypted form, but on save it always encrypts |
|
114 | 114 | """ |
|
115 | 115 | impl = Text |
|
116 | 116 | |
|
117 | 117 | def process_bind_param(self, value, dialect): |
|
118 | 118 | if not value: |
|
119 | 119 | return value |
|
120 | 120 | if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'): |
|
121 | 121 | # protect against double encrypting if someone manually starts |
|
122 | 122 | # doing |
|
123 | 123 | raise ValueError('value needs to be in unencrypted format, ie. ' |
|
124 | 124 | 'not starting with enc$aes') |
|
125 | 125 | return 'enc$aes_hmac$%s' % AESCipher( |
|
126 | 126 | ENCRYPTION_KEY, hmac=True).encrypt(value) |
|
127 | 127 | |
|
128 | 128 | def process_result_value(self, value, dialect): |
|
129 | 129 | import rhodecode |
|
130 | 130 | |
|
131 | 131 | if not value: |
|
132 | 132 | return value |
|
133 | 133 | |
|
134 | 134 | parts = value.split('$', 3) |
|
135 | 135 | if not len(parts) == 3: |
|
136 | 136 | # probably not encrypted values |
|
137 | 137 | return value |
|
138 | 138 | else: |
|
139 | 139 | if parts[0] != 'enc': |
|
140 | 140 | # parts ok but without our header ? |
|
141 | 141 | return value |
|
142 | 142 | enc_strict_mode = str2bool(rhodecode.CONFIG.get( |
|
143 | 143 | 'rhodecode.encrypted_values.strict') or True) |
|
144 | 144 | # at that stage we know it's our encryption |
|
145 | 145 | if parts[1] == 'aes': |
|
146 | 146 | decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2]) |
|
147 | 147 | elif parts[1] == 'aes_hmac': |
|
148 | 148 | decrypted_data = AESCipher( |
|
149 | 149 | ENCRYPTION_KEY, hmac=True, |
|
150 | 150 | strict_verification=enc_strict_mode).decrypt(parts[2]) |
|
151 | 151 | else: |
|
152 | 152 | raise ValueError( |
|
153 | 153 | 'Encryption type part is wrong, must be `aes` ' |
|
154 | 154 | 'or `aes_hmac`, got `%s` instead' % (parts[1])) |
|
155 | 155 | return decrypted_data |
|
156 | 156 | |
|
157 | 157 | |
|
158 | 158 | class BaseModel(object): |
|
159 | 159 | """ |
|
160 | 160 | Base Model for all classes |
|
161 | 161 | """ |
|
162 | 162 | |
|
163 | 163 | @classmethod |
|
164 | 164 | def _get_keys(cls): |
|
165 | 165 | """return column names for this model """ |
|
166 | 166 | return class_mapper(cls).c.keys() |
|
167 | 167 | |
|
168 | 168 | def get_dict(self): |
|
169 | 169 | """ |
|
170 | 170 | return dict with keys and values corresponding |
|
171 | 171 | to this model data """ |
|
172 | 172 | |
|
173 | 173 | d = {} |
|
174 | 174 | for k in self._get_keys(): |
|
175 | 175 | d[k] = getattr(self, k) |
|
176 | 176 | |
|
177 | 177 | # also use __json__() if present to get additional fields |
|
178 | 178 | _json_attr = getattr(self, '__json__', None) |
|
179 | 179 | if _json_attr: |
|
180 | 180 | # update with attributes from __json__ |
|
181 | 181 | if callable(_json_attr): |
|
182 | 182 | _json_attr = _json_attr() |
|
183 | 183 | for k, val in _json_attr.iteritems(): |
|
184 | 184 | d[k] = val |
|
185 | 185 | return d |
|
186 | 186 | |
|
187 | 187 | def get_appstruct(self): |
|
188 | 188 | """return list with keys and values tuples corresponding |
|
189 | 189 | to this model data """ |
|
190 | 190 | |
|
191 | 191 | l = [] |
|
192 | 192 | for k in self._get_keys(): |
|
193 | 193 | l.append((k, getattr(self, k),)) |
|
194 | 194 | return l |
|
195 | 195 | |
|
196 | 196 | def populate_obj(self, populate_dict): |
|
197 | 197 | """populate model with data from given populate_dict""" |
|
198 | 198 | |
|
199 | 199 | for k in self._get_keys(): |
|
200 | 200 | if k in populate_dict: |
|
201 | 201 | setattr(self, k, populate_dict[k]) |
|
202 | 202 | |
|
203 | 203 | @classmethod |
|
204 | 204 | def query(cls): |
|
205 | 205 | return Session().query(cls) |
|
206 | 206 | |
|
207 | 207 | @classmethod |
|
208 | 208 | def get(cls, id_): |
|
209 | 209 | if id_: |
|
210 | 210 | return cls.query().get(id_) |
|
211 | 211 | |
|
212 | 212 | @classmethod |
|
213 | 213 | def get_or_404(cls, id_): |
|
214 | 214 | try: |
|
215 | 215 | id_ = int(id_) |
|
216 | 216 | except (TypeError, ValueError): |
|
217 | 217 | raise HTTPNotFound |
|
218 | 218 | |
|
219 | 219 | res = cls.query().get(id_) |
|
220 | 220 | if not res: |
|
221 | 221 | raise HTTPNotFound |
|
222 | 222 | return res |
|
223 | 223 | |
|
224 | 224 | @classmethod |
|
225 | 225 | def getAll(cls): |
|
226 | 226 | # deprecated and left for backward compatibility |
|
227 | 227 | return cls.get_all() |
|
228 | 228 | |
|
229 | 229 | @classmethod |
|
230 | 230 | def get_all(cls): |
|
231 | 231 | return cls.query().all() |
|
232 | 232 | |
|
233 | 233 | @classmethod |
|
234 | 234 | def delete(cls, id_): |
|
235 | 235 | obj = cls.query().get(id_) |
|
236 | 236 | Session().delete(obj) |
|
237 | 237 | |
|
238 | 238 | @classmethod |
|
239 | 239 | def identity_cache(cls, session, attr_name, value): |
|
240 | 240 | exist_in_session = [] |
|
241 | 241 | for (item_cls, pkey), instance in session.identity_map.items(): |
|
242 | 242 | if cls == item_cls and getattr(instance, attr_name) == value: |
|
243 | 243 | exist_in_session.append(instance) |
|
244 | 244 | if exist_in_session: |
|
245 | 245 | if len(exist_in_session) == 1: |
|
246 | 246 | return exist_in_session[0] |
|
247 | 247 | log.exception( |
|
248 | 248 | 'multiple objects with attr %s and ' |
|
249 | 249 | 'value %s found with same name: %r', |
|
250 | 250 | attr_name, value, exist_in_session) |
|
251 | 251 | |
|
252 | 252 | def __repr__(self): |
|
253 | 253 | if hasattr(self, '__unicode__'): |
|
254 | 254 | # python repr needs to return str |
|
255 | 255 | try: |
|
256 | 256 | return safe_str(self.__unicode__()) |
|
257 | 257 | except UnicodeDecodeError: |
|
258 | 258 | pass |
|
259 | 259 | return '<DB:%s>' % (self.__class__.__name__) |
|
260 | 260 | |
|
261 | 261 | |
|
262 | 262 | class RhodeCodeSetting(Base, BaseModel): |
|
263 | 263 | __tablename__ = 'rhodecode_settings' |
|
264 | 264 | __table_args__ = ( |
|
265 | 265 | UniqueConstraint('app_settings_name'), |
|
266 | 266 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
267 | 267 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
268 | 268 | ) |
|
269 | 269 | |
|
270 | 270 | SETTINGS_TYPES = { |
|
271 | 271 | 'str': safe_str, |
|
272 | 272 | 'int': safe_int, |
|
273 | 273 | 'unicode': safe_unicode, |
|
274 | 274 | 'bool': str2bool, |
|
275 | 275 | 'list': functools.partial(aslist, sep=',') |
|
276 | 276 | } |
|
277 | 277 | DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' |
|
278 | 278 | GLOBAL_CONF_KEY = 'app_settings' |
|
279 | 279 | |
|
280 | 280 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
281 | 281 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) |
|
282 | 282 | _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) |
|
283 | 283 | _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) |
|
284 | 284 | |
|
285 | 285 | def __init__(self, key='', val='', type='unicode'): |
|
286 | 286 | self.app_settings_name = key |
|
287 | 287 | self.app_settings_type = type |
|
288 | 288 | self.app_settings_value = val |
|
289 | 289 | |
|
290 | 290 | @validates('_app_settings_value') |
|
291 | 291 | def validate_settings_value(self, key, val): |
|
292 | 292 | assert type(val) == unicode |
|
293 | 293 | return val |
|
294 | 294 | |
|
295 | 295 | @hybrid_property |
|
296 | 296 | def app_settings_value(self): |
|
297 | 297 | v = self._app_settings_value |
|
298 | 298 | _type = self.app_settings_type |
|
299 | 299 | if _type: |
|
300 | 300 | _type = self.app_settings_type.split('.')[0] |
|
301 | 301 | # decode the encrypted value |
|
302 | 302 | if 'encrypted' in self.app_settings_type: |
|
303 | 303 | cipher = EncryptedTextValue() |
|
304 | 304 | v = safe_unicode(cipher.process_result_value(v, None)) |
|
305 | 305 | |
|
306 | 306 | converter = self.SETTINGS_TYPES.get(_type) or \ |
|
307 | 307 | self.SETTINGS_TYPES['unicode'] |
|
308 | 308 | return converter(v) |
|
309 | 309 | |
|
310 | 310 | @app_settings_value.setter |
|
311 | 311 | def app_settings_value(self, val): |
|
312 | 312 | """ |
|
313 | 313 | Setter that will always make sure we use unicode in app_settings_value |
|
314 | 314 | |
|
315 | 315 | :param val: |
|
316 | 316 | """ |
|
317 | 317 | val = safe_unicode(val) |
|
318 | 318 | # encode the encrypted value |
|
319 | 319 | if 'encrypted' in self.app_settings_type: |
|
320 | 320 | cipher = EncryptedTextValue() |
|
321 | 321 | val = safe_unicode(cipher.process_bind_param(val, None)) |
|
322 | 322 | self._app_settings_value = val |
|
323 | 323 | |
|
324 | 324 | @hybrid_property |
|
325 | 325 | def app_settings_type(self): |
|
326 | 326 | return self._app_settings_type |
|
327 | 327 | |
|
328 | 328 | @app_settings_type.setter |
|
329 | 329 | def app_settings_type(self, val): |
|
330 | 330 | if val.split('.')[0] not in self.SETTINGS_TYPES: |
|
331 | 331 | raise Exception('type must be one of %s got %s' |
|
332 | 332 | % (self.SETTINGS_TYPES.keys(), val)) |
|
333 | 333 | self._app_settings_type = val |
|
334 | 334 | |
|
335 | 335 | def __unicode__(self): |
|
336 | 336 | return u"<%s('%s:%s[%s]')>" % ( |
|
337 | 337 | self.__class__.__name__, |
|
338 | 338 | self.app_settings_name, self.app_settings_value, |
|
339 | 339 | self.app_settings_type |
|
340 | 340 | ) |
|
341 | 341 | |
|
342 | 342 | |
|
343 | 343 | class RhodeCodeUi(Base, BaseModel): |
|
344 | 344 | __tablename__ = 'rhodecode_ui' |
|
345 | 345 | __table_args__ = ( |
|
346 | 346 | UniqueConstraint('ui_key'), |
|
347 | 347 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
348 | 348 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
349 | 349 | ) |
|
350 | 350 | |
|
351 | 351 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
352 | 352 | # HG |
|
353 | 353 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' |
|
354 | 354 | HOOK_PULL = 'outgoing.pull_logger' |
|
355 | 355 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' |
|
356 | 356 | HOOK_PUSH = 'changegroup.push_logger' |
|
357 | 357 | |
|
358 | 358 | # TODO: johbo: Unify way how hooks are configured for git and hg, |
|
359 | 359 | # git part is currently hardcoded. |
|
360 | 360 | |
|
361 | 361 | # SVN PATTERNS |
|
362 | 362 | SVN_BRANCH_ID = 'vcs_svn_branch' |
|
363 | 363 | SVN_TAG_ID = 'vcs_svn_tag' |
|
364 | 364 | |
|
365 | 365 | ui_id = Column( |
|
366 | 366 | "ui_id", Integer(), nullable=False, unique=True, default=None, |
|
367 | 367 | primary_key=True) |
|
368 | 368 | ui_section = Column( |
|
369 | 369 | "ui_section", String(255), nullable=True, unique=None, default=None) |
|
370 | 370 | ui_key = Column( |
|
371 | 371 | "ui_key", String(255), nullable=True, unique=None, default=None) |
|
372 | 372 | ui_value = Column( |
|
373 | 373 | "ui_value", String(255), nullable=True, unique=None, default=None) |
|
374 | 374 | ui_active = Column( |
|
375 | 375 | "ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
376 | 376 | |
|
377 | 377 | def __repr__(self): |
|
378 | 378 | return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section, |
|
379 | 379 | self.ui_key, self.ui_value) |
|
380 | 380 | |
|
381 | 381 | |
|
382 | 382 | class RepoRhodeCodeSetting(Base, BaseModel): |
|
383 | 383 | __tablename__ = 'repo_rhodecode_settings' |
|
384 | 384 | __table_args__ = ( |
|
385 | 385 | UniqueConstraint( |
|
386 | 386 | 'app_settings_name', 'repository_id', |
|
387 | 387 | name='uq_repo_rhodecode_setting_name_repo_id'), |
|
388 | 388 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
389 | 389 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
390 | 390 | ) |
|
391 | 391 | |
|
392 | 392 | repository_id = Column( |
|
393 | 393 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), |
|
394 | 394 | nullable=False) |
|
395 | 395 | app_settings_id = Column( |
|
396 | 396 | "app_settings_id", Integer(), nullable=False, unique=True, |
|
397 | 397 | default=None, primary_key=True) |
|
398 | 398 | app_settings_name = Column( |
|
399 | 399 | "app_settings_name", String(255), nullable=True, unique=None, |
|
400 | 400 | default=None) |
|
401 | 401 | _app_settings_value = Column( |
|
402 | 402 | "app_settings_value", String(4096), nullable=True, unique=None, |
|
403 | 403 | default=None) |
|
404 | 404 | _app_settings_type = Column( |
|
405 | 405 | "app_settings_type", String(255), nullable=True, unique=None, |
|
406 | 406 | default=None) |
|
407 | 407 | |
|
408 | 408 | repository = relationship('Repository') |
|
409 | 409 | |
|
410 | 410 | def __init__(self, repository_id, key='', val='', type='unicode'): |
|
411 | 411 | self.repository_id = repository_id |
|
412 | 412 | self.app_settings_name = key |
|
413 | 413 | self.app_settings_type = type |
|
414 | 414 | self.app_settings_value = val |
|
415 | 415 | |
|
416 | 416 | @validates('_app_settings_value') |
|
417 | 417 | def validate_settings_value(self, key, val): |
|
418 | 418 | assert type(val) == unicode |
|
419 | 419 | return val |
|
420 | 420 | |
|
421 | 421 | @hybrid_property |
|
422 | 422 | def app_settings_value(self): |
|
423 | 423 | v = self._app_settings_value |
|
424 | 424 | type_ = self.app_settings_type |
|
425 | 425 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES |
|
426 | 426 | converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] |
|
427 | 427 | return converter(v) |
|
428 | 428 | |
|
429 | 429 | @app_settings_value.setter |
|
430 | 430 | def app_settings_value(self, val): |
|
431 | 431 | """ |
|
432 | 432 | Setter that will always make sure we use unicode in app_settings_value |
|
433 | 433 | |
|
434 | 434 | :param val: |
|
435 | 435 | """ |
|
436 | 436 | self._app_settings_value = safe_unicode(val) |
|
437 | 437 | |
|
438 | 438 | @hybrid_property |
|
439 | 439 | def app_settings_type(self): |
|
440 | 440 | return self._app_settings_type |
|
441 | 441 | |
|
442 | 442 | @app_settings_type.setter |
|
443 | 443 | def app_settings_type(self, val): |
|
444 | 444 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES |
|
445 | 445 | if val not in SETTINGS_TYPES: |
|
446 | 446 | raise Exception('type must be one of %s got %s' |
|
447 | 447 | % (SETTINGS_TYPES.keys(), val)) |
|
448 | 448 | self._app_settings_type = val |
|
449 | 449 | |
|
450 | 450 | def __unicode__(self): |
|
451 | 451 | return u"<%s('%s:%s:%s[%s]')>" % ( |
|
452 | 452 | self.__class__.__name__, self.repository.repo_name, |
|
453 | 453 | self.app_settings_name, self.app_settings_value, |
|
454 | 454 | self.app_settings_type |
|
455 | 455 | ) |
|
456 | 456 | |
|
457 | 457 | |
|
458 | 458 | class RepoRhodeCodeUi(Base, BaseModel): |
|
459 | 459 | __tablename__ = 'repo_rhodecode_ui' |
|
460 | 460 | __table_args__ = ( |
|
461 | 461 | UniqueConstraint( |
|
462 | 462 | 'repository_id', 'ui_section', 'ui_key', |
|
463 | 463 | name='uq_repo_rhodecode_ui_repository_id_section_key'), |
|
464 | 464 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
465 | 465 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
466 | 466 | ) |
|
467 | 467 | |
|
468 | 468 | repository_id = Column( |
|
469 | 469 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), |
|
470 | 470 | nullable=False) |
|
471 | 471 | ui_id = Column( |
|
472 | 472 | "ui_id", Integer(), nullable=False, unique=True, default=None, |
|
473 | 473 | primary_key=True) |
|
474 | 474 | ui_section = Column( |
|
475 | 475 | "ui_section", String(255), nullable=True, unique=None, default=None) |
|
476 | 476 | ui_key = Column( |
|
477 | 477 | "ui_key", String(255), nullable=True, unique=None, default=None) |
|
478 | 478 | ui_value = Column( |
|
479 | 479 | "ui_value", String(255), nullable=True, unique=None, default=None) |
|
480 | 480 | ui_active = Column( |
|
481 | 481 | "ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
482 | 482 | |
|
483 | 483 | repository = relationship('Repository') |
|
484 | 484 | |
|
485 | 485 | def __repr__(self): |
|
486 | 486 | return '<%s[%s:%s]%s=>%s]>' % ( |
|
487 | 487 | self.__class__.__name__, self.repository.repo_name, |
|
488 | 488 | self.ui_section, self.ui_key, self.ui_value) |
|
489 | 489 | |
|
490 | 490 | |
|
491 | 491 | class User(Base, BaseModel): |
|
492 | 492 | __tablename__ = 'users' |
|
493 | 493 | __table_args__ = ( |
|
494 | 494 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
495 | 495 | Index('u_username_idx', 'username'), |
|
496 | 496 | Index('u_email_idx', 'email'), |
|
497 | 497 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
498 | 498 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
499 | 499 | ) |
|
500 | 500 | DEFAULT_USER = 'default' |
|
501 | 501 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' |
|
502 | 502 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' |
|
503 | 503 | |
|
504 | 504 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
505 | 505 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
506 | 506 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
507 | 507 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
508 | 508 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
509 | 509 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) |
|
510 | 510 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
511 | 511 | _email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
512 | 512 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
513 | 513 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) |
|
514 | 514 | extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) |
|
515 | 515 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
516 | 516 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
517 | 517 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
518 | 518 | _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data |
|
519 | 519 | |
|
520 | 520 | user_log = relationship('UserLog') |
|
521 | 521 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
522 | 522 | |
|
523 | 523 | repositories = relationship('Repository') |
|
524 | 524 | repository_groups = relationship('RepoGroup') |
|
525 | 525 | user_groups = relationship('UserGroup') |
|
526 | 526 | |
|
527 | 527 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
528 | 528 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') |
|
529 | 529 | |
|
530 | 530 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
531 | 531 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') |
|
532 | 532 | user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all') |
|
533 | 533 | |
|
534 | 534 | group_member = relationship('UserGroupMember', cascade='all') |
|
535 | 535 | |
|
536 | 536 | notifications = relationship('UserNotification', cascade='all') |
|
537 | 537 | # notifications assigned to this user |
|
538 | 538 | user_created_notifications = relationship('Notification', cascade='all') |
|
539 | 539 | # comments created by this user |
|
540 | 540 | user_comments = relationship('ChangesetComment', cascade='all') |
|
541 | 541 | # user profile extra info |
|
542 | 542 | user_emails = relationship('UserEmailMap', cascade='all') |
|
543 | 543 | user_ip_map = relationship('UserIpMap', cascade='all') |
|
544 | 544 | user_auth_tokens = relationship('UserApiKeys', cascade='all') |
|
545 | 545 | # gists |
|
546 | 546 | user_gists = relationship('Gist', cascade='all') |
|
547 | 547 | # user pull requests |
|
548 | 548 | user_pull_requests = relationship('PullRequest', cascade='all') |
|
549 | 549 | # external identities |
|
550 | 550 | extenal_identities = relationship( |
|
551 | 551 | 'ExternalIdentity', |
|
552 | 552 | primaryjoin="User.user_id==ExternalIdentity.local_user_id", |
|
553 | 553 | cascade='all') |
|
554 | 554 | |
|
555 | 555 | def __unicode__(self): |
|
556 | 556 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
557 | 557 | self.user_id, self.username) |
|
558 | 558 | |
|
559 | 559 | @hybrid_property |
|
560 | 560 | def email(self): |
|
561 | 561 | return self._email |
|
562 | 562 | |
|
563 | 563 | @email.setter |
|
564 | 564 | def email(self, val): |
|
565 | 565 | self._email = val.lower() if val else None |
|
566 | 566 | |
|
567 | 567 | @property |
|
568 | 568 | def firstname(self): |
|
569 | 569 | # alias for future |
|
570 | 570 | return self.name |
|
571 | 571 | |
|
572 | 572 | @property |
|
573 | 573 | def emails(self): |
|
574 | 574 | other = UserEmailMap.query().filter(UserEmailMap.user==self).all() |
|
575 | 575 | return [self.email] + [x.email for x in other] |
|
576 | 576 | |
|
577 | 577 | @property |
|
578 | 578 | def auth_tokens(self): |
|
579 | 579 | return [self.api_key] + [x.api_key for x in self.extra_auth_tokens] |
|
580 | 580 | |
|
581 | 581 | @property |
|
582 | 582 | def extra_auth_tokens(self): |
|
583 | 583 | return UserApiKeys.query().filter(UserApiKeys.user == self).all() |
|
584 | 584 | |
|
585 | 585 | @property |
|
586 | 586 | def feed_token(self): |
|
587 | 587 | feed_tokens = UserApiKeys.query()\ |
|
588 | 588 | .filter(UserApiKeys.user == self)\ |
|
589 | 589 | .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\ |
|
590 | 590 | .all() |
|
591 | 591 | if feed_tokens: |
|
592 | 592 | return feed_tokens[0].api_key |
|
593 | 593 | else: |
|
594 | 594 | # use the main token so we don't end up with nothing... |
|
595 | 595 | return self.api_key |
|
596 | 596 | |
|
597 | 597 | @classmethod |
|
598 | 598 | def extra_valid_auth_tokens(cls, user, role=None): |
|
599 | 599 | tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ |
|
600 | 600 | .filter(or_(UserApiKeys.expires == -1, |
|
601 | 601 | UserApiKeys.expires >= time.time())) |
|
602 | 602 | if role: |
|
603 | 603 | tokens = tokens.filter(or_(UserApiKeys.role == role, |
|
604 | 604 | UserApiKeys.role == UserApiKeys.ROLE_ALL)) |
|
605 | 605 | return tokens.all() |
|
606 | 606 | |
|
607 | 607 | @property |
|
608 | 608 | def ip_addresses(self): |
|
609 | 609 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() |
|
610 | 610 | return [x.ip_addr for x in ret] |
|
611 | 611 | |
|
612 | 612 | @property |
|
613 | 613 | def username_and_name(self): |
|
614 | 614 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) |
|
615 | 615 | |
|
616 | 616 | @property |
|
617 | 617 | def username_or_name_or_email(self): |
|
618 | 618 | full_name = self.full_name if self.full_name is not ' ' else None |
|
619 | 619 | return self.username or full_name or self.email |
|
620 | 620 | |
|
621 | 621 | @property |
|
622 | 622 | def full_name(self): |
|
623 | 623 | return '%s %s' % (self.firstname, self.lastname) |
|
624 | 624 | |
|
625 | 625 | @property |
|
626 | 626 | def full_name_or_username(self): |
|
627 | 627 | return ('%s %s' % (self.firstname, self.lastname) |
|
628 | 628 | if (self.firstname and self.lastname) else self.username) |
|
629 | 629 | |
|
630 | 630 | @property |
|
631 | 631 | def full_contact(self): |
|
632 | 632 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) |
|
633 | 633 | |
|
634 | 634 | @property |
|
635 | 635 | def short_contact(self): |
|
636 | 636 | return '%s %s' % (self.firstname, self.lastname) |
|
637 | 637 | |
|
638 | 638 | @property |
|
639 | 639 | def is_admin(self): |
|
640 | 640 | return self.admin |
|
641 | 641 | |
|
642 | 642 | @property |
|
643 | 643 | def AuthUser(self): |
|
644 | 644 | """ |
|
645 | 645 | Returns instance of AuthUser for this user |
|
646 | 646 | """ |
|
647 | 647 | from rhodecode.lib.auth import AuthUser |
|
648 | 648 | return AuthUser(user_id=self.user_id, api_key=self.api_key, |
|
649 | 649 | username=self.username) |
|
650 | 650 | |
|
651 | 651 | @hybrid_property |
|
652 | 652 | def user_data(self): |
|
653 | 653 | if not self._user_data: |
|
654 | 654 | return {} |
|
655 | 655 | |
|
656 | 656 | try: |
|
657 | 657 | return json.loads(self._user_data) |
|
658 | 658 | except TypeError: |
|
659 | 659 | return {} |
|
660 | 660 | |
|
661 | 661 | @user_data.setter |
|
662 | 662 | def user_data(self, val): |
|
663 | 663 | if not isinstance(val, dict): |
|
664 | 664 | raise Exception('user_data must be dict, got %s' % type(val)) |
|
665 | 665 | try: |
|
666 | 666 | self._user_data = json.dumps(val) |
|
667 | 667 | except Exception: |
|
668 | 668 | log.error(traceback.format_exc()) |
|
669 | 669 | |
|
670 | 670 | @classmethod |
|
671 | 671 | def get_by_username(cls, username, case_insensitive=False, |
|
672 | 672 | cache=False, identity_cache=False): |
|
673 | 673 | session = Session() |
|
674 | 674 | |
|
675 | 675 | if case_insensitive: |
|
676 | 676 | q = cls.query().filter( |
|
677 | 677 | func.lower(cls.username) == func.lower(username)) |
|
678 | 678 | else: |
|
679 | 679 | q = cls.query().filter(cls.username == username) |
|
680 | 680 | |
|
681 | 681 | if cache: |
|
682 | 682 | if identity_cache: |
|
683 | 683 | val = cls.identity_cache(session, 'username', username) |
|
684 | 684 | if val: |
|
685 | 685 | return val |
|
686 | 686 | else: |
|
687 | 687 | q = q.options( |
|
688 | 688 | FromCache("sql_cache_short", |
|
689 | 689 | "get_user_by_name_%s" % _hash_key(username))) |
|
690 | 690 | |
|
691 | 691 | return q.scalar() |
|
692 | 692 | |
|
693 | 693 | @classmethod |
|
694 | 694 | def get_by_auth_token(cls, auth_token, cache=False, fallback=True): |
|
695 | 695 | q = cls.query().filter(cls.api_key == auth_token) |
|
696 | 696 | |
|
697 | 697 | if cache: |
|
698 | 698 | q = q.options(FromCache("sql_cache_short", |
|
699 | 699 | "get_auth_token_%s" % auth_token)) |
|
700 | 700 | res = q.scalar() |
|
701 | 701 | |
|
702 | 702 | if fallback and not res: |
|
703 | 703 | #fallback to additional keys |
|
704 | 704 | _res = UserApiKeys.query()\ |
|
705 | 705 | .filter(UserApiKeys.api_key == auth_token)\ |
|
706 | 706 | .filter(or_(UserApiKeys.expires == -1, |
|
707 | 707 | UserApiKeys.expires >= time.time()))\ |
|
708 | 708 | .first() |
|
709 | 709 | if _res: |
|
710 | 710 | res = _res.user |
|
711 | 711 | return res |
|
712 | 712 | |
|
713 | 713 | @classmethod |
|
714 | 714 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
715 | 715 | |
|
716 | 716 | if case_insensitive: |
|
717 | 717 | q = cls.query().filter(func.lower(cls.email) == func.lower(email)) |
|
718 | 718 | |
|
719 | 719 | else: |
|
720 | 720 | q = cls.query().filter(cls.email == email) |
|
721 | 721 | |
|
722 | 722 | if cache: |
|
723 | 723 | q = q.options(FromCache("sql_cache_short", |
|
724 | 724 | "get_email_key_%s" % _hash_key(email))) |
|
725 | 725 | |
|
726 | 726 | ret = q.scalar() |
|
727 | 727 | if ret is None: |
|
728 | 728 | q = UserEmailMap.query() |
|
729 | 729 | # try fetching in alternate email map |
|
730 | 730 | if case_insensitive: |
|
731 | 731 | q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) |
|
732 | 732 | else: |
|
733 | 733 | q = q.filter(UserEmailMap.email == email) |
|
734 | 734 | q = q.options(joinedload(UserEmailMap.user)) |
|
735 | 735 | if cache: |
|
736 | 736 | q = q.options(FromCache("sql_cache_short", |
|
737 | 737 | "get_email_map_key_%s" % email)) |
|
738 | 738 | ret = getattr(q.scalar(), 'user', None) |
|
739 | 739 | |
|
740 | 740 | return ret |
|
741 | 741 | |
|
742 | 742 | @classmethod |
|
743 | 743 | def get_from_cs_author(cls, author): |
|
744 | 744 | """ |
|
745 | 745 | Tries to get User objects out of commit author string |
|
746 | 746 | |
|
747 | 747 | :param author: |
|
748 | 748 | """ |
|
749 | 749 | from rhodecode.lib.helpers import email, author_name |
|
750 | 750 | # Valid email in the attribute passed, see if they're in the system |
|
751 | 751 | _email = email(author) |
|
752 | 752 | if _email: |
|
753 | 753 | user = cls.get_by_email(_email, case_insensitive=True) |
|
754 | 754 | if user: |
|
755 | 755 | return user |
|
756 | 756 | # Maybe we can match by username? |
|
757 | 757 | _author = author_name(author) |
|
758 | 758 | user = cls.get_by_username(_author, case_insensitive=True) |
|
759 | 759 | if user: |
|
760 | 760 | return user |
|
761 | 761 | |
|
762 | 762 | def update_userdata(self, **kwargs): |
|
763 | 763 | usr = self |
|
764 | 764 | old = usr.user_data |
|
765 | 765 | old.update(**kwargs) |
|
766 | 766 | usr.user_data = old |
|
767 | 767 | Session().add(usr) |
|
768 | 768 | log.debug('updated userdata with ', kwargs) |
|
769 | 769 | |
|
770 | 770 | def update_lastlogin(self): |
|
771 | 771 | """Update user lastlogin""" |
|
772 | 772 | self.last_login = datetime.datetime.now() |
|
773 | 773 | Session().add(self) |
|
774 | 774 | log.debug('updated user %s lastlogin', self.username) |
|
775 | 775 | |
|
776 | 776 | def update_lastactivity(self): |
|
777 | 777 | """Update user lastactivity""" |
|
778 | 778 | usr = self |
|
779 | 779 | old = usr.user_data |
|
780 | 780 | old.update({'last_activity': time.time()}) |
|
781 | 781 | usr.user_data = old |
|
782 | 782 | Session().add(usr) |
|
783 | 783 | log.debug('updated user %s lastactivity', usr.username) |
|
784 | 784 | |
|
785 | 785 | def update_password(self, new_password, change_api_key=False): |
|
786 | 786 | from rhodecode.lib.auth import get_crypt_password,generate_auth_token |
|
787 | 787 | |
|
788 | 788 | self.password = get_crypt_password(new_password) |
|
789 | 789 | if change_api_key: |
|
790 | 790 | self.api_key = generate_auth_token(self.username) |
|
791 | 791 | Session().add(self) |
|
792 | 792 | |
|
793 | 793 | @classmethod |
|
794 | 794 | def get_first_super_admin(cls): |
|
795 | 795 | user = User.query().filter(User.admin == true()).first() |
|
796 | 796 | if user is None: |
|
797 | 797 | raise Exception('FATAL: Missing administrative account!') |
|
798 | 798 | return user |
|
799 | 799 | |
|
800 | 800 | @classmethod |
|
801 | 801 | def get_all_super_admins(cls): |
|
802 | 802 | """ |
|
803 | 803 | Returns all admin accounts sorted by username |
|
804 | 804 | """ |
|
805 | 805 | return User.query().filter(User.admin == true())\ |
|
806 | 806 | .order_by(User.username.asc()).all() |
|
807 | 807 | |
|
808 | 808 | @classmethod |
|
809 | 809 | def get_default_user(cls, cache=False): |
|
810 | 810 | user = User.get_by_username(User.DEFAULT_USER, cache=cache) |
|
811 | 811 | if user is None: |
|
812 | 812 | raise Exception('FATAL: Missing default account!') |
|
813 | 813 | return user |
|
814 | 814 | |
|
815 | 815 | def _get_default_perms(self, user, suffix=''): |
|
816 | 816 | from rhodecode.model.permission import PermissionModel |
|
817 | 817 | return PermissionModel().get_default_perms(user.user_perms, suffix) |
|
818 | 818 | |
|
819 | 819 | def get_default_perms(self, suffix=''): |
|
820 | 820 | return self._get_default_perms(self, suffix) |
|
821 | 821 | |
|
822 | 822 | def get_api_data(self, include_secrets=False, details='full'): |
|
823 | 823 | """ |
|
824 | 824 | Common function for generating user related data for API |
|
825 | 825 | |
|
826 | 826 | :param include_secrets: By default secrets in the API data will be replaced |
|
827 | 827 | by a placeholder value to prevent exposing this data by accident. In case |
|
828 | 828 | this data shall be exposed, set this flag to ``True``. |
|
829 | 829 | |
|
830 | 830 | :param details: details can be 'basic|full' basic gives only a subset of |
|
831 | 831 | the available user information that includes user_id, name and emails. |
|
832 | 832 | """ |
|
833 | 833 | user = self |
|
834 | 834 | user_data = self.user_data |
|
835 | 835 | data = { |
|
836 | 836 | 'user_id': user.user_id, |
|
837 | 837 | 'username': user.username, |
|
838 | 838 | 'firstname': user.name, |
|
839 | 839 | 'lastname': user.lastname, |
|
840 | 840 | 'email': user.email, |
|
841 | 841 | 'emails': user.emails, |
|
842 | 842 | } |
|
843 | 843 | if details == 'basic': |
|
844 | 844 | return data |
|
845 | 845 | |
|
846 | 846 | api_key_length = 40 |
|
847 | 847 | api_key_replacement = '*' * api_key_length |
|
848 | 848 | |
|
849 | 849 | extras = { |
|
850 | 850 | 'api_key': api_key_replacement, |
|
851 | 851 | 'api_keys': [api_key_replacement], |
|
852 | 852 | 'active': user.active, |
|
853 | 853 | 'admin': user.admin, |
|
854 | 854 | 'extern_type': user.extern_type, |
|
855 | 855 | 'extern_name': user.extern_name, |
|
856 | 856 | 'last_login': user.last_login, |
|
857 | 857 | 'ip_addresses': user.ip_addresses, |
|
858 | 858 | 'language': user_data.get('language') |
|
859 | 859 | } |
|
860 | 860 | data.update(extras) |
|
861 | 861 | |
|
862 | 862 | if include_secrets: |
|
863 | 863 | data['api_key'] = user.api_key |
|
864 | 864 | data['api_keys'] = user.auth_tokens |
|
865 | 865 | return data |
|
866 | 866 | |
|
867 | 867 | def __json__(self): |
|
868 | 868 | data = { |
|
869 | 869 | 'full_name': self.full_name, |
|
870 | 870 | 'full_name_or_username': self.full_name_or_username, |
|
871 | 871 | 'short_contact': self.short_contact, |
|
872 | 872 | 'full_contact': self.full_contact, |
|
873 | 873 | } |
|
874 | 874 | data.update(self.get_api_data()) |
|
875 | 875 | return data |
|
876 | 876 | |
|
877 | 877 | |
|
878 | 878 | class UserApiKeys(Base, BaseModel): |
|
879 | 879 | __tablename__ = 'user_api_keys' |
|
880 | 880 | __table_args__ = ( |
|
881 | 881 | Index('uak_api_key_idx', 'api_key'), |
|
882 | 882 | Index('uak_api_key_expires_idx', 'api_key', 'expires'), |
|
883 | 883 | UniqueConstraint('api_key'), |
|
884 | 884 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
885 | 885 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
886 | 886 | ) |
|
887 | 887 | __mapper_args__ = {} |
|
888 | 888 | |
|
889 | 889 | # ApiKey role |
|
890 | 890 | ROLE_ALL = 'token_role_all' |
|
891 | 891 | ROLE_HTTP = 'token_role_http' |
|
892 | 892 | ROLE_VCS = 'token_role_vcs' |
|
893 | 893 | ROLE_API = 'token_role_api' |
|
894 | 894 | ROLE_FEED = 'token_role_feed' |
|
895 | 895 | ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED] |
|
896 | 896 | |
|
897 | 897 | user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
898 | 898 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
899 | 899 | api_key = Column("api_key", String(255), nullable=False, unique=True) |
|
900 | 900 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) |
|
901 | 901 | expires = Column('expires', Float(53), nullable=False) |
|
902 | 902 | role = Column('role', String(255), nullable=True) |
|
903 | 903 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
904 | 904 | |
|
905 | 905 | user = relationship('User', lazy='joined') |
|
906 | 906 | |
|
907 | 907 | @classmethod |
|
908 | 908 | def _get_role_name(cls, role): |
|
909 | 909 | return { |
|
910 | 910 | cls.ROLE_ALL: _('all'), |
|
911 | 911 | cls.ROLE_HTTP: _('http/web interface'), |
|
912 | 912 | cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), |
|
913 | 913 | cls.ROLE_API: _('api calls'), |
|
914 | 914 | cls.ROLE_FEED: _('feed access'), |
|
915 | 915 | }.get(role, role) |
|
916 | 916 | |
|
917 | 917 | @property |
|
918 | 918 | def expired(self): |
|
919 | 919 | if self.expires == -1: |
|
920 | 920 | return False |
|
921 | 921 | return time.time() > self.expires |
|
922 | 922 | |
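# Hedged sketch of the expiry convention enforced above: ``expires`` holds a
# unix timestamp, with -1 meaning the token never expires (values below are
# illustrative and other required columns are omitted):
#
#   import time
#   key = UserApiKeys()
#   key.expires = time.time() + 3600   # valid for one hour
#   key.expires = -1                   # or: never expires
#   key.expired                        # False until the timestamp passes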
|
923 | 923 | @property |
|
924 | 924 | def role_humanized(self): |
|
925 | 925 | return self._get_role_name(self.role) |
|
926 | 926 | |
|
927 | 927 | |
|
928 | 928 | class UserEmailMap(Base, BaseModel): |
|
929 | 929 | __tablename__ = 'user_email_map' |
|
930 | 930 | __table_args__ = ( |
|
931 | 931 | Index('uem_email_idx', 'email'), |
|
932 | 932 | UniqueConstraint('email'), |
|
933 | 933 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
934 | 934 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
935 | 935 | ) |
|
936 | 936 | __mapper_args__ = {} |
|
937 | 937 | |
|
938 | 938 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
939 | 939 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
940 | 940 | _email = Column("email", String(255), nullable=True, unique=False, default=None) |
|
941 | 941 | user = relationship('User', lazy='joined') |
|
942 | 942 | |
|
943 | 943 | @validates('_email') |
|
944 | 944 | def validate_email(self, key, email): |
|
945 | 945 | # check that this email is not already someone's main email
|
946 | 946 | main_email = Session().query(User).filter(User.email == email).scalar() |
|
947 | 947 | if main_email is not None: |
|
948 | 948 | raise AttributeError('email %s is present in user table' % email)
|
949 | 949 | return email |
|
950 | 950 | |
|
951 | 951 | @hybrid_property |
|
952 | 952 | def email(self): |
|
953 | 953 | return self._email |
|
954 | 954 | |
|
955 | 955 | @email.setter |
|
956 | 956 | def email(self, val): |
|
957 | 957 | self._email = val.lower() if val else None |
|
958 | 958 | |
|
959 | 959 | |
|
960 | 960 | class UserIpMap(Base, BaseModel): |
|
961 | 961 | __tablename__ = 'user_ip_map' |
|
962 | 962 | __table_args__ = ( |
|
963 | 963 | UniqueConstraint('user_id', 'ip_addr'), |
|
964 | 964 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
965 | 965 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
966 | 966 | ) |
|
967 | 967 | __mapper_args__ = {} |
|
968 | 968 | |
|
969 | 969 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
970 | 970 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
971 | 971 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) |
|
972 | 972 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
973 | 973 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
974 | 974 | user = relationship('User', lazy='joined') |
|
975 | 975 | |
|
976 | 976 | @classmethod |
|
977 | 977 | def _get_ip_range(cls, ip_addr): |
|
978 | 978 | net = ipaddress.ip_network(ip_addr, strict=False) |
|
979 | 979 | return [str(net.network_address), str(net.broadcast_address)] |
|
980 | 980 | |
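# Example of what _get_ip_range yields (addresses are illustrative):
#
#   UserIpMap._get_ip_range('10.0.0.0/24')   # -> ['10.0.0.0', '10.0.0.255']
#   UserIpMap._get_ip_range('192.168.1.7')   # -> ['192.168.1.7', '192.168.1.7']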
|
981 | 981 | def __json__(self): |
|
982 | 982 | return { |
|
983 | 983 | 'ip_addr': self.ip_addr, |
|
984 | 984 | 'ip_range': self._get_ip_range(self.ip_addr), |
|
985 | 985 | } |
|
986 | 986 | |
|
987 | 987 | def __unicode__(self): |
|
988 | 988 | return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__, |
|
989 | 989 | self.user_id, self.ip_addr) |
|
990 | 990 | |
|
991 | 991 | class UserLog(Base, BaseModel): |
|
992 | 992 | __tablename__ = 'user_logs' |
|
993 | 993 | __table_args__ = ( |
|
994 | 994 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
995 | 995 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
996 | 996 | ) |
|
997 | 997 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
998 | 998 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
999 | 999 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
1000 | 1000 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
|
1001 | 1001 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) |
|
1002 | 1002 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) |
|
1003 | 1003 | action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) |
|
1004 | 1004 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
1005 | 1005 | |
|
1006 | 1006 | def __unicode__(self): |
|
1007 | 1007 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
1008 | 1008 | self.repository_name, |
|
1009 | 1009 | self.action) |
|
1010 | 1010 | |
|
1011 | 1011 | @property |
|
1012 | 1012 | def action_as_day(self): |
|
1013 | 1013 | return datetime.date(*self.action_date.timetuple()[:3]) |
|
1014 | 1014 | |
|
1015 | 1015 | user = relationship('User') |
|
1016 | 1016 | repository = relationship('Repository', cascade='') |
|
1017 | 1017 | |
|
1018 | 1018 | |
|
1019 | 1019 | class UserGroup(Base, BaseModel): |
|
1020 | 1020 | __tablename__ = 'users_groups' |
|
1021 | 1021 | __table_args__ = ( |
|
1022 | 1022 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1023 | 1023 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1024 | 1024 | ) |
|
1025 | 1025 | |
|
1026 | 1026 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1027 | 1027 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) |
|
1028 | 1028 | user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) |
|
1029 | 1029 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
1030 | 1030 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
1031 | 1031 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
1032 | 1032 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1033 | 1033 | _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data |
|
1034 | 1034 | |
|
1035 | 1035 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
1036 | 1036 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') |
|
1037 | 1037 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
1038 | 1038 | users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
1039 | 1039 | user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') |
|
1040 | 1040 | user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
|
1041 | 1041 | |
|
1042 | 1042 | user = relationship('User') |
|
1043 | 1043 | |
|
1044 | 1044 | @hybrid_property |
|
1045 | 1045 | def group_data(self): |
|
1046 | 1046 | if not self._group_data: |
|
1047 | 1047 | return {} |
|
1048 | 1048 | |
|
1049 | 1049 | try: |
|
1050 | 1050 | return json.loads(self._group_data) |
|
1051 | 1051 | except TypeError: |
|
1052 | 1052 | return {} |
|
1053 | 1053 | |
|
1054 | 1054 | @group_data.setter |
|
1055 | 1055 | def group_data(self, val): |
|
1056 | 1056 | try: |
|
1057 | 1057 | self._group_data = json.dumps(val) |
|
1058 | 1058 | except Exception: |
|
1059 | 1059 | log.error(traceback.format_exc()) |
|
1060 | 1060 | |
|
1061 | 1061 | def __unicode__(self): |
|
1062 | 1062 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
1063 | 1063 | self.users_group_id, |
|
1064 | 1064 | self.users_group_name) |
|
1065 | 1065 | |
|
1066 | 1066 | @classmethod |
|
1067 | 1067 | def get_by_group_name(cls, group_name, cache=False, |
|
1068 | 1068 | case_insensitive=False): |
|
1069 | 1069 | if case_insensitive: |
|
1070 | 1070 | q = cls.query().filter(func.lower(cls.users_group_name) == |
|
1071 | 1071 | func.lower(group_name)) |
|
1072 | 1072 | |
|
1073 | 1073 | else: |
|
1074 | 1074 | q = cls.query().filter(cls.users_group_name == group_name) |
|
1075 | 1075 | if cache: |
|
1076 | 1076 | q = q.options(FromCache( |
|
1077 | 1077 | "sql_cache_short", |
|
1078 | 1078 | "get_group_%s" % _hash_key(group_name))) |
|
1079 | 1079 | return q.scalar() |
|
1080 | 1080 | |
|
1081 | 1081 | @classmethod |
|
1082 | 1082 | def get(cls, user_group_id, cache=False): |
|
1083 | 1083 | user_group = cls.query() |
|
1084 | 1084 | if cache: |
|
1085 | 1085 | user_group = user_group.options(FromCache("sql_cache_short", |
|
1086 | 1086 | "get_users_group_%s" % user_group_id)) |
|
1087 | 1087 | return user_group.get(user_group_id) |
|
1088 | 1088 | |
|
1089 | 1089 | def permissions(self, with_admins=True, with_owner=True): |
|
1090 | 1090 | q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) |
|
1091 | 1091 | q = q.options(joinedload(UserUserGroupToPerm.user_group), |
|
1092 | 1092 | joinedload(UserUserGroupToPerm.user), |
|
1093 | 1093 | joinedload(UserUserGroupToPerm.permission),) |
|
1094 | 1094 | |
|
1095 | 1095 | # get owners, admins and their permissions. We do a trick of re-writing

1096 | 1096 | # the sqlalchemy objects into named-tuple-like dicts, because the sqlalchemy

1097 | 1097 | # session keeps a global reference and changing one object propagates to all

1098 | 1098 | # others. This means that if an admin is also the owner, an admin_row change

1099 | 1099 | # would otherwise propagate to both objects
|
1100 | 1100 | perm_rows = [] |
|
1101 | 1101 | for _usr in q.all(): |
|
1102 | 1102 | usr = AttributeDict(_usr.user.get_dict()) |
|
1103 | 1103 | usr.permission = _usr.permission.permission_name |
|
1104 | 1104 | perm_rows.append(usr) |
|
1105 | 1105 | |
|
1106 | 1106 | # order the perm rows with the 'default' user first, then by admin, write,

1107 | 1107 | # read, none permission, sorted again alphabetically within

1108 | 1108 | # each group
|
1109 | 1109 | perm_rows = sorted(perm_rows, key=display_sort) |
|
1110 | 1110 | |
|
1111 | 1111 | _admin_perm = 'usergroup.admin' |
|
1112 | 1112 | owner_row = [] |
|
1113 | 1113 | if with_owner: |
|
1114 | 1114 | usr = AttributeDict(self.user.get_dict()) |
|
1115 | 1115 | usr.owner_row = True |
|
1116 | 1116 | usr.permission = _admin_perm |
|
1117 | 1117 | owner_row.append(usr) |
|
1118 | 1118 | |
|
1119 | 1119 | super_admin_rows = [] |
|
1120 | 1120 | if with_admins: |
|
1121 | 1121 | for usr in User.get_all_super_admins(): |
|
1122 | 1122 | # if this admin is also owner, don't double the record |
|
1123 | 1123 | if usr.user_id == owner_row[0].user_id: |
|
1124 | 1124 | owner_row[0].admin_row = True |
|
1125 | 1125 | else: |
|
1126 | 1126 | usr = AttributeDict(usr.get_dict()) |
|
1127 | 1127 | usr.admin_row = True |
|
1128 | 1128 | usr.permission = _admin_perm |
|
1129 | 1129 | super_admin_rows.append(usr) |
|
1130 | 1130 | |
|
1131 | 1131 | return super_admin_rows + owner_row + perm_rows |
|
1132 | 1132 | |
|
1133 | 1133 | def permission_user_groups(self): |
|
1134 | 1134 | q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self) |
|
1135 | 1135 | q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), |
|
1136 | 1136 | joinedload(UserGroupUserGroupToPerm.target_user_group), |
|
1137 | 1137 | joinedload(UserGroupUserGroupToPerm.permission),) |
|
1138 | 1138 | |
|
1139 | 1139 | perm_rows = [] |
|
1140 | 1140 | for _user_group in q.all(): |
|
1141 | 1141 | usr = AttributeDict(_user_group.user_group.get_dict()) |
|
1142 | 1142 | usr.permission = _user_group.permission.permission_name |
|
1143 | 1143 | perm_rows.append(usr) |
|
1144 | 1144 | |
|
1145 | 1145 | return perm_rows |
|
1146 | 1146 | |
|
1147 | 1147 | def _get_default_perms(self, user_group, suffix=''): |
|
1148 | 1148 | from rhodecode.model.permission import PermissionModel |
|
1149 | 1149 | return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) |
|
1150 | 1150 | |
|
1151 | 1151 | def get_default_perms(self, suffix=''): |
|
1152 | 1152 | return self._get_default_perms(self, suffix) |
|
1153 | 1153 | |
|
1154 | 1154 | def get_api_data(self, with_group_members=True, include_secrets=False): |
|
1155 | 1155 | """ |
|
1156 | 1156 | :param include_secrets: See :meth:`User.get_api_data`, this parameter is |
|
1157 | 1157 | basically forwarded. |
|
1158 | 1158 | |
|
1159 | 1159 | """ |
|
1160 | 1160 | user_group = self |
|
1161 | 1161 | |
|
1162 | 1162 | data = { |
|
1163 | 1163 | 'users_group_id': user_group.users_group_id, |
|
1164 | 1164 | 'group_name': user_group.users_group_name, |
|
1165 | 1165 | 'group_description': user_group.user_group_description, |
|
1166 | 1166 | 'active': user_group.users_group_active, |
|
1167 | 1167 | 'owner': user_group.user.username, |
|
1168 | 1168 | } |
|
1169 | 1169 | if with_group_members: |
|
1170 | 1170 | users = [] |
|
1171 | 1171 | for user in user_group.members: |
|
1172 | 1172 | user = user.user |
|
1173 | 1173 | users.append(user.get_api_data(include_secrets=include_secrets)) |
|
1174 | 1174 | data['users'] = users |
|
1175 | 1175 | |
|
1176 | 1176 | return data |
|
1177 | 1177 | |
|
1178 | 1178 | |
|
1179 | 1179 | class UserGroupMember(Base, BaseModel): |
|
1180 | 1180 | __tablename__ = 'users_groups_members' |
|
1181 | 1181 | __table_args__ = ( |
|
1182 | 1182 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1183 | 1183 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1184 | 1184 | ) |
|
1185 | 1185 | |
|
1186 | 1186 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1187 | 1187 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
1188 | 1188 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
1189 | 1189 | |
|
1190 | 1190 | user = relationship('User', lazy='joined') |
|
1191 | 1191 | users_group = relationship('UserGroup') |
|
1192 | 1192 | |
|
1193 | 1193 | def __init__(self, gr_id='', u_id=''): |
|
1194 | 1194 | self.users_group_id = gr_id |
|
1195 | 1195 | self.user_id = u_id |
|
1196 | 1196 | |
|
1197 | 1197 | |
|
1198 | 1198 | class RepositoryField(Base, BaseModel): |
|
1199 | 1199 | __tablename__ = 'repositories_fields' |
|
1200 | 1200 | __table_args__ = ( |
|
1201 | 1201 | UniqueConstraint('repository_id', 'field_key'), # no-multi field |
|
1202 | 1202 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1203 | 1203 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1204 | 1204 | ) |
|
1205 | 1205 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields |
|
1206 | 1206 | |
|
1207 | 1207 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1208 | 1208 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
1209 | 1209 | field_key = Column("field_key", String(250)) |
|
1210 | 1210 | field_label = Column("field_label", String(1024), nullable=False) |
|
1211 | 1211 | field_value = Column("field_value", String(10000), nullable=False) |
|
1212 | 1212 | field_desc = Column("field_desc", String(1024), nullable=False) |
|
1213 | 1213 | field_type = Column("field_type", String(255), nullable=False, unique=None) |
|
1214 | 1214 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1215 | 1215 | |
|
1216 | 1216 | repository = relationship('Repository') |
|
1217 | 1217 | |
|
1218 | 1218 | @property |
|
1219 | 1219 | def field_key_prefixed(self): |
|
1220 | 1220 | return 'ex_%s' % self.field_key |
|
1221 | 1221 | |
|
1222 | 1222 | @classmethod |
|
1223 | 1223 | def un_prefix_key(cls, key): |
|
1224 | 1224 | if key.startswith(cls.PREFIX): |
|
1225 | 1225 | return key[len(cls.PREFIX):] |
|
1226 | 1226 | return key |
|
1227 | 1227 | |
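# Example of the ``ex_`` prefix handling (the 'license' field key is hypothetical):
#
#   RepositoryField.un_prefix_key('ex_license')   # -> 'license'
#   RepositoryField.un_prefix_key('license')      # -> 'license' (unchanged)
#   field.field_key_prefixed                      # -> 'ex_license' for field_key 'license'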
|
1228 | 1228 | @classmethod |
|
1229 | 1229 | def get_by_key_name(cls, key, repo): |
|
1230 | 1230 | row = cls.query()\ |
|
1231 | 1231 | .filter(cls.repository == repo)\ |
|
1232 | 1232 | .filter(cls.field_key == key).scalar() |
|
1233 | 1233 | return row |
|
1234 | 1234 | |
|
1235 | 1235 | |
|
1236 | 1236 | class Repository(Base, BaseModel): |
|
1237 | 1237 | __tablename__ = 'repositories' |
|
1238 | 1238 | __table_args__ = ( |
|
1239 | 1239 | Index('r_repo_name_idx', 'repo_name', mysql_length=255), |
|
1240 | 1240 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1241 | 1241 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1242 | 1242 | ) |
|
1243 | 1243 | DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' |
|
1244 | 1244 | DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' |
|
1245 | 1245 | |
|
1246 | 1246 | STATE_CREATED = 'repo_state_created' |
|
1247 | 1247 | STATE_PENDING = 'repo_state_pending' |
|
1248 | 1248 | STATE_ERROR = 'repo_state_error' |
|
1249 | 1249 | |
|
1250 | 1250 | LOCK_AUTOMATIC = 'lock_auto' |
|
1251 | 1251 | LOCK_API = 'lock_api' |
|
1252 | 1252 | LOCK_WEB = 'lock_web' |
|
1253 | 1253 | LOCK_PULL = 'lock_pull' |
|
1254 | 1254 | |
|
1255 | 1255 | NAME_SEP = URL_SEP |
|
1256 | 1256 | |
|
1257 | 1257 | repo_id = Column( |
|
1258 | 1258 | "repo_id", Integer(), nullable=False, unique=True, default=None, |
|
1259 | 1259 | primary_key=True) |
|
1260 | 1260 | _repo_name = Column( |
|
1261 | 1261 | "repo_name", Text(), nullable=False, default=None) |
|
1262 | 1262 | _repo_name_hash = Column( |
|
1263 | 1263 | "repo_name_hash", String(255), nullable=False, unique=True) |
|
1264 | 1264 | repo_state = Column("repo_state", String(255), nullable=True) |
|
1265 | 1265 | |
|
1266 | 1266 | clone_uri = Column( |
|
1267 | 1267 | "clone_uri", EncryptedTextValue(), nullable=True, unique=False, |
|
1268 | 1268 | default=None) |
|
1269 | 1269 | repo_type = Column( |
|
1270 | 1270 | "repo_type", String(255), nullable=False, unique=False, default=None) |
|
1271 | 1271 | user_id = Column( |
|
1272 | 1272 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, |
|
1273 | 1273 | unique=False, default=None) |
|
1274 | 1274 | private = Column( |
|
1275 | 1275 | "private", Boolean(), nullable=True, unique=None, default=None) |
|
1276 | 1276 | enable_statistics = Column( |
|
1277 | 1277 | "statistics", Boolean(), nullable=True, unique=None, default=True) |
|
1278 | 1278 | enable_downloads = Column( |
|
1279 | 1279 | "downloads", Boolean(), nullable=True, unique=None, default=True) |
|
1280 | 1280 | description = Column( |
|
1281 | 1281 | "description", String(10000), nullable=True, unique=None, default=None) |
|
1282 | 1282 | created_on = Column( |
|
1283 | 1283 | 'created_on', DateTime(timezone=False), nullable=True, unique=None, |
|
1284 | 1284 | default=datetime.datetime.now) |
|
1285 | 1285 | updated_on = Column( |
|
1286 | 1286 | 'updated_on', DateTime(timezone=False), nullable=True, unique=None, |
|
1287 | 1287 | default=datetime.datetime.now) |
|
1288 | 1288 | _landing_revision = Column( |
|
1289 | 1289 | "landing_revision", String(255), nullable=False, unique=False, |
|
1290 | 1290 | default=None) |
|
1291 | 1291 | enable_locking = Column( |
|
1292 | 1292 | "enable_locking", Boolean(), nullable=False, unique=None, |
|
1293 | 1293 | default=False) |
|
1294 | 1294 | _locked = Column( |
|
1295 | 1295 | "locked", String(255), nullable=True, unique=False, default=None) |
|
1296 | 1296 | _changeset_cache = Column( |
|
1297 | 1297 | "changeset_cache", LargeBinary(), nullable=True) # JSON data |
|
1298 | 1298 | |
|
1299 | 1299 | fork_id = Column( |
|
1300 | 1300 | "fork_id", Integer(), ForeignKey('repositories.repo_id'), |
|
1301 | 1301 | nullable=True, unique=False, default=None) |
|
1302 | 1302 | group_id = Column( |
|
1303 | 1303 | "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, |
|
1304 | 1304 | unique=False, default=None) |
|
1305 | 1305 | |
|
1306 | 1306 | user = relationship('User', lazy='joined') |
|
1307 | 1307 | fork = relationship('Repository', remote_side=repo_id, lazy='joined') |
|
1308 | 1308 | group = relationship('RepoGroup', lazy='joined') |
|
1309 | 1309 | repo_to_perm = relationship( |
|
1310 | 1310 | 'UserRepoToPerm', cascade='all', |
|
1311 | 1311 | order_by='UserRepoToPerm.repo_to_perm_id') |
|
1312 | 1312 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
1313 | 1313 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
1314 | 1314 | |
|
1315 | 1315 | followers = relationship( |
|
1316 | 1316 | 'UserFollowing', |
|
1317 | 1317 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', |
|
1318 | 1318 | cascade='all') |
|
1319 | 1319 | extra_fields = relationship( |
|
1320 | 1320 | 'RepositoryField', cascade="all, delete, delete-orphan") |
|
1321 | 1321 | logs = relationship('UserLog') |
|
1322 | 1322 | comments = relationship( |
|
1323 | 1323 | 'ChangesetComment', cascade="all, delete, delete-orphan") |
|
1324 | 1324 | pull_requests_source = relationship( |
|
1325 | 1325 | 'PullRequest', |
|
1326 | 1326 | primaryjoin='PullRequest.source_repo_id==Repository.repo_id', |
|
1327 | 1327 | cascade="all, delete, delete-orphan") |
|
1328 | 1328 | pull_requests_target = relationship( |
|
1329 | 1329 | 'PullRequest', |
|
1330 | 1330 | primaryjoin='PullRequest.target_repo_id==Repository.repo_id', |
|
1331 | 1331 | cascade="all, delete, delete-orphan") |
|
1332 | 1332 | ui = relationship('RepoRhodeCodeUi', cascade="all") |
|
1333 | 1333 | settings = relationship('RepoRhodeCodeSetting', cascade="all") |
|
1334 | 1334 | integrations = relationship('Integration', |
|
1335 | 1335 | cascade="all, delete, delete-orphan") |
|
1336 | 1336 | |
|
1337 | 1337 | def __unicode__(self): |
|
1338 | 1338 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, |
|
1339 | 1339 | safe_unicode(self.repo_name)) |
|
1340 | 1340 | |
|
1341 | 1341 | @hybrid_property |
|
1342 | 1342 | def landing_rev(self): |
|
1343 | 1343 | # always should return [rev_type, rev] |
|
1344 | 1344 | if self._landing_revision: |
|
1345 | 1345 | _rev_info = self._landing_revision.split(':') |
|
1346 | 1346 | if len(_rev_info) < 2: |
|
1347 | 1347 | _rev_info.insert(0, 'rev') |
|
1348 | 1348 | return [_rev_info[0], _rev_info[1]] |
|
1349 | 1349 | return [None, None] |
|
1350 | 1350 | |
|
1351 | 1351 | @landing_rev.setter |
|
1352 | 1352 | def landing_rev(self, val): |
|
1353 | 1353 | if ':' not in val: |
|
1354 | 1354 | raise ValueError('value must be delimited with `:` and consist ' |
|
1355 | 1355 | 'of <rev_type>:<rev>, got %s instead' % val) |
|
1356 | 1356 | self._landing_revision = val |
|
1357 | 1357 | |
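# Minimal sketch of the landing_rev round-trip (values are examples only):
#
#   repo.landing_rev = 'branch:default'   # stored as '<rev_type>:<rev>'
#   repo.landing_rev                      # -> ['branch', 'default']
#   repo.landing_rev = 'default'          # raises ValueError, no ':' delimiter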
|
1358 | 1358 | @hybrid_property |
|
1359 | 1359 | def locked(self): |
|
1360 | 1360 | if self._locked: |
|
1361 | 1361 | user_id, timelocked, reason = self._locked.split(':') |
|
1362 | 1362 | lock_values = int(user_id), timelocked, reason |
|
1363 | 1363 | else: |
|
1364 | 1364 | lock_values = [None, None, None] |
|
1365 | 1365 | return lock_values |
|
1366 | 1366 | |
|
1367 | 1367 | @locked.setter |
|
1368 | 1368 | def locked(self, val): |
|
1369 | 1369 | if val and isinstance(val, (list, tuple)): |
|
1370 | 1370 | self._locked = ':'.join(map(str, val)) |
|
1371 | 1371 | else: |
|
1372 | 1372 | self._locked = None |
|
1373 | 1373 | |
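# Sketch of the locked tri-value (user id, unix time, reason); the concrete
# values below are illustrative:
#
#   repo.locked = [2, time.time(), Repository.LOCK_API]   # stored as 'id:time:reason'
#   repo.locked                                           # -> (2, '1467110400.0', 'lock_api')
#   repo.locked = None                                     # clears the lock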
|
1374 | 1374 | @hybrid_property |
|
1375 | 1375 | def changeset_cache(self): |
|
1376 | 1376 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
1377 | 1377 | dummy = EmptyCommit().__json__() |
|
1378 | 1378 | if not self._changeset_cache: |
|
1379 | 1379 | return dummy |
|
1380 | 1380 | try: |
|
1381 | 1381 | return json.loads(self._changeset_cache) |
|
1382 | 1382 | except TypeError: |
|
1383 | 1383 | return dummy |
|
1384 | 1384 | except Exception: |
|
1385 | 1385 | log.error(traceback.format_exc()) |
|
1386 | 1386 | return dummy |
|
1387 | 1387 | |
|
1388 | 1388 | @changeset_cache.setter |
|
1389 | 1389 | def changeset_cache(self, val): |
|
1390 | 1390 | try: |
|
1391 | 1391 | self._changeset_cache = json.dumps(val) |
|
1392 | 1392 | except Exception: |
|
1393 | 1393 | log.error(traceback.format_exc()) |
|
1394 | 1394 | |
|
1395 | 1395 | @hybrid_property |
|
1396 | 1396 | def repo_name(self): |
|
1397 | 1397 | return self._repo_name |
|
1398 | 1398 | |
|
1399 | 1399 | @repo_name.setter |
|
1400 | 1400 | def repo_name(self, value): |
|
1401 | 1401 | self._repo_name = value |
|
1402 | 1402 | self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() |
|
1403 | 1403 | |
|
1404 | 1404 | @classmethod |
|
1405 | 1405 | def normalize_repo_name(cls, repo_name): |
|
1406 | 1406 | """ |
|
1407 | 1407 | Normalizes an OS-specific repo_name to the format stored internally in the

1408 | 1408 | database, using URL_SEP
|
1409 | 1409 | |
|
1410 | 1410 | :param cls: |
|
1411 | 1411 | :param repo_name: |
|
1412 | 1412 | """ |
|
1413 | 1413 | return cls.NAME_SEP.join(repo_name.split(os.sep)) |
|
1414 | 1414 | |
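# Illustrative behaviour (the Windows case is an assumption based on os.sep):
#
#   Repository.normalize_repo_name('group/repo')    # POSIX: -> 'group/repo'
#   Repository.normalize_repo_name(r'group\repo')   # Windows: -> 'group/repo'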
|
1415 | 1415 | @classmethod |
|
1416 | 1416 | def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): |
|
1417 | 1417 | session = Session() |
|
1418 | 1418 | q = session.query(cls).filter(cls.repo_name == repo_name) |
|
1419 | 1419 | |
|
1420 | 1420 | if cache: |
|
1421 | 1421 | if identity_cache: |
|
1422 | 1422 | val = cls.identity_cache(session, 'repo_name', repo_name) |
|
1423 | 1423 | if val: |
|
1424 | 1424 | return val |
|
1425 | 1425 | else: |
|
1426 | 1426 | q = q.options( |
|
1427 | 1427 | FromCache("sql_cache_short", |
|
1428 | 1428 | "get_repo_by_name_%s" % _hash_key(repo_name))) |
|
1429 | 1429 | |
|
1430 | 1430 | return q.scalar() |
|
1431 | 1431 | |
|
1432 | 1432 | @classmethod |
|
1433 | 1433 | def get_by_full_path(cls, repo_full_path): |
|
1434 | 1434 | repo_name = repo_full_path.split(cls.base_path(), 1)[-1] |
|
1435 | 1435 | repo_name = cls.normalize_repo_name(repo_name) |
|
1436 | 1436 | return cls.get_by_repo_name(repo_name.strip(URL_SEP)) |
|
1437 | 1437 | |
|
1438 | 1438 | @classmethod |
|
1439 | 1439 | def get_repo_forks(cls, repo_id): |
|
1440 | 1440 | return cls.query().filter(Repository.fork_id == repo_id) |
|
1441 | 1441 | |
|
1442 | 1442 | @classmethod |
|
1443 | 1443 | def base_path(cls): |
|
1444 | 1444 | """ |
|
1445 | 1445 | Returns the base path where all repos are stored
|
1446 | 1446 | |
|
1447 | 1447 | :param cls: |
|
1448 | 1448 | """ |
|
1449 | 1449 | q = Session().query(RhodeCodeUi)\ |
|
1450 | 1450 | .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) |
|
1451 | 1451 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
1452 | 1452 | return q.one().ui_value |
|
1453 | 1453 | |
|
1454 | 1454 | @classmethod |
|
1455 | 1455 | def is_valid(cls, repo_name): |
|
1456 | 1456 | """ |
|
1457 | 1457 | returns True if given repo name is a valid filesystem repository |
|
1458 | 1458 | |
|
1459 | 1459 | :param cls: |
|
1460 | 1460 | :param repo_name: |
|
1461 | 1461 | """ |
|
1462 | 1462 | from rhodecode.lib.utils import is_valid_repo |
|
1463 | 1463 | |
|
1464 | 1464 | return is_valid_repo(repo_name, cls.base_path()) |
|
1465 | 1465 | |
|
1466 | 1466 | @classmethod |
|
1467 | 1467 | def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), |
|
1468 | 1468 | case_insensitive=True): |
|
1469 | 1469 | q = Repository.query() |
|
1470 | 1470 | |
|
1471 | 1471 | if not isinstance(user_id, Optional): |
|
1472 | 1472 | q = q.filter(Repository.user_id == user_id) |
|
1473 | 1473 | |
|
1474 | 1474 | if not isinstance(group_id, Optional): |
|
1475 | 1475 | q = q.filter(Repository.group_id == group_id) |
|
1476 | 1476 | |
|
1477 | 1477 | if case_insensitive: |
|
1478 | 1478 | q = q.order_by(func.lower(Repository.repo_name)) |
|
1479 | 1479 | else: |
|
1480 | 1480 | q = q.order_by(Repository.repo_name) |
|
1481 | 1481 | return q.all() |
|
1482 | 1482 | |
|
1483 | 1483 | @property |
|
1484 | 1484 | def forks(self): |
|
1485 | 1485 | """ |
|
1486 | 1486 | Return forks of this repo |
|
1487 | 1487 | """ |
|
1488 | 1488 | return Repository.get_repo_forks(self.repo_id) |
|
1489 | 1489 | |
|
1490 | 1490 | @property |
|
1491 | 1491 | def parent(self): |
|
1492 | 1492 | """ |
|
1493 | 1493 | Returns fork parent |
|
1494 | 1494 | """ |
|
1495 | 1495 | return self.fork |
|
1496 | 1496 | |
|
1497 | 1497 | @property |
|
1498 | 1498 | def just_name(self): |
|
1499 | 1499 | return self.repo_name.split(self.NAME_SEP)[-1] |
|
1500 | 1500 | |
|
1501 | 1501 | @property |
|
1502 | 1502 | def groups_with_parents(self): |
|
1503 | 1503 | groups = [] |
|
1504 | 1504 | if self.group is None: |
|
1505 | 1505 | return groups |
|
1506 | 1506 | |
|
1507 | 1507 | cur_gr = self.group |
|
1508 | 1508 | groups.insert(0, cur_gr) |
|
1509 | 1509 | while 1: |
|
1510 | 1510 | gr = getattr(cur_gr, 'parent_group', None) |
|
1511 | 1511 | cur_gr = cur_gr.parent_group |
|
1512 | 1512 | if gr is None: |
|
1513 | 1513 | break |
|
1514 | 1514 | groups.insert(0, gr) |
|
1515 | 1515 | |
|
1516 | 1516 | return groups |
|
1517 | 1517 | |
|
1518 | 1518 | @property |
|
1519 | 1519 | def groups_and_repo(self): |
|
1520 | 1520 | return self.groups_with_parents, self |
|
1521 | 1521 | |
|
1522 | 1522 | @LazyProperty |
|
1523 | 1523 | def repo_path(self): |
|
1524 | 1524 | """ |
|
1525 | 1525 | Returns the full base path for this repository, meaning where it actually

1526 | 1526 | exists on the filesystem
|
1527 | 1527 | """ |
|
1528 | 1528 | q = Session().query(RhodeCodeUi).filter( |
|
1529 | 1529 | RhodeCodeUi.ui_key == self.NAME_SEP) |
|
1530 | 1530 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
1531 | 1531 | return q.one().ui_value |
|
1532 | 1532 | |
|
1533 | 1533 | @property |
|
1534 | 1534 | def repo_full_path(self): |
|
1535 | 1535 | p = [self.repo_path] |
|
1536 | 1536 | # we need to split the name by / since this is how we store the |
|
1537 | 1537 | # names in the database, but that eventually needs to be converted |
|
1538 | 1538 | # into a valid system path |
|
1539 | 1539 | p += self.repo_name.split(self.NAME_SEP) |
|
1540 | 1540 | return os.path.join(*map(safe_unicode, p)) |
|
1541 | 1541 | |
|
1542 | 1542 | @property |
|
1543 | 1543 | def cache_keys(self): |
|
1544 | 1544 | """ |
|
1545 | 1545 | Returns associated cache keys for that repo |
|
1546 | 1546 | """ |
|
1547 | 1547 | return CacheKey.query()\ |
|
1548 | 1548 | .filter(CacheKey.cache_args == self.repo_name)\ |
|
1549 | 1549 | .order_by(CacheKey.cache_key)\ |
|
1550 | 1550 | .all() |
|
1551 | 1551 | |
|
1552 | 1552 | def get_new_name(self, repo_name): |
|
1553 | 1553 | """ |
|
1554 | 1554 | returns new full repository name based on assigned group and new repo name

1555 | 1555 |

1556 | 1556 | :param repo_name:
|
1557 | 1557 | """ |
|
1558 | 1558 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
1559 | 1559 | return self.NAME_SEP.join(path_prefix + [repo_name]) |
|
1560 | 1560 | |
|
1561 | 1561 | @property |
|
1562 | 1562 | def _config(self): |
|
1563 | 1563 | """ |
|
1564 | 1564 | Returns db based config object. |
|
1565 | 1565 | """ |
|
1566 | 1566 | from rhodecode.lib.utils import make_db_config |
|
1567 | 1567 | return make_db_config(clear_session=False, repo=self) |
|
1568 | 1568 | |
|
1569 | 1569 | def permissions(self, with_admins=True, with_owner=True): |
|
1570 | 1570 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) |
|
1571 | 1571 | q = q.options(joinedload(UserRepoToPerm.repository), |
|
1572 | 1572 | joinedload(UserRepoToPerm.user), |
|
1573 | 1573 | joinedload(UserRepoToPerm.permission),) |
|
1574 | 1574 | |
|
1575 | 1575 | # get owners, admins and their permissions. We do a trick of re-writing

1576 | 1576 | # the sqlalchemy objects into named-tuple-like dicts, because the sqlalchemy

1577 | 1577 | # session keeps a global reference and changing one object propagates to all

1578 | 1578 | # others. This means that if an admin is also the owner, an admin_row change

1579 | 1579 | # would otherwise propagate to both objects
|
1580 | 1580 | perm_rows = [] |
|
1581 | 1581 | for _usr in q.all(): |
|
1582 | 1582 | usr = AttributeDict(_usr.user.get_dict()) |
|
1583 | 1583 | usr.permission = _usr.permission.permission_name |
|
1584 | 1584 | perm_rows.append(usr) |
|
1585 | 1585 | |
|
1586 | 1586 | # order the perm rows with the 'default' user first, then by admin, write,

1587 | 1587 | # read, none permission, sorted again alphabetically within

1588 | 1588 | # each group
|
1589 | 1589 | perm_rows = sorted(perm_rows, key=display_sort) |
|
1590 | 1590 | |
|
1591 | 1591 | _admin_perm = 'repository.admin' |
|
1592 | 1592 | owner_row = [] |
|
1593 | 1593 | if with_owner: |
|
1594 | 1594 | usr = AttributeDict(self.user.get_dict()) |
|
1595 | 1595 | usr.owner_row = True |
|
1596 | 1596 | usr.permission = _admin_perm |
|
1597 | 1597 | owner_row.append(usr) |
|
1598 | 1598 | |
|
1599 | 1599 | super_admin_rows = [] |
|
1600 | 1600 | if with_admins: |
|
1601 | 1601 | for usr in User.get_all_super_admins(): |
|
1602 | 1602 | # if this admin is also owner, don't double the record |
|
1603 | 1603 | if usr.user_id == owner_row[0].user_id: |
|
1604 | 1604 | owner_row[0].admin_row = True |
|
1605 | 1605 | else: |
|
1606 | 1606 | usr = AttributeDict(usr.get_dict()) |
|
1607 | 1607 | usr.admin_row = True |
|
1608 | 1608 | usr.permission = _admin_perm |
|
1609 | 1609 | super_admin_rows.append(usr) |
|
1610 | 1610 | |
|
1611 | 1611 | return super_admin_rows + owner_row + perm_rows |
|
1612 | 1612 | |
|
1613 | 1613 | def permission_user_groups(self): |
|
1614 | 1614 | q = UserGroupRepoToPerm.query().filter( |
|
1615 | 1615 | UserGroupRepoToPerm.repository == self) |
|
1616 | 1616 | q = q.options(joinedload(UserGroupRepoToPerm.repository), |
|
1617 | 1617 | joinedload(UserGroupRepoToPerm.users_group), |
|
1618 | 1618 | joinedload(UserGroupRepoToPerm.permission),) |
|
1619 | 1619 | |
|
1620 | 1620 | perm_rows = [] |
|
1621 | 1621 | for _user_group in q.all(): |
|
1622 | 1622 | usr = AttributeDict(_user_group.users_group.get_dict()) |
|
1623 | 1623 | usr.permission = _user_group.permission.permission_name |
|
1624 | 1624 | perm_rows.append(usr) |
|
1625 | 1625 | |
|
1626 | 1626 | return perm_rows |
|
1627 | 1627 | |
|
1628 | 1628 | def get_api_data(self, include_secrets=False): |
|
1629 | 1629 | """ |
|
1630 | 1630 | Common function for generating repo api data |
|
1631 | 1631 | |
|
1632 | 1632 | :param include_secrets: See :meth:`User.get_api_data`. |
|
1633 | 1633 | |
|
1634 | 1634 | """ |
|
1635 | 1635 | # TODO: mikhail: Here there is an anti-pattern, we probably need to |
|
1636 | 1636 | # move these methods to the models level.
|
1637 | 1637 | from rhodecode.model.settings import SettingsModel |
|
1638 | 1638 | |
|
1639 | 1639 | repo = self |
|
1640 | 1640 | _user_id, _time, _reason = self.locked |
|
1641 | 1641 | |
|
1642 | 1642 | data = { |
|
1643 | 1643 | 'repo_id': repo.repo_id, |
|
1644 | 1644 | 'repo_name': repo.repo_name, |
|
1645 | 1645 | 'repo_type': repo.repo_type, |
|
1646 | 1646 | 'clone_uri': repo.clone_uri or '', |
|
1647 | 1647 | 'url': url('summary_home', repo_name=self.repo_name, qualified=True), |
|
1648 | 1648 | 'private': repo.private, |
|
1649 | 1649 | 'created_on': repo.created_on, |
|
1650 | 1650 | 'description': repo.description, |
|
1651 | 1651 | 'landing_rev': repo.landing_rev, |
|
1652 | 1652 | 'owner': repo.user.username, |
|
1653 | 1653 | 'fork_of': repo.fork.repo_name if repo.fork else None, |
|
1654 | 1654 | 'enable_statistics': repo.enable_statistics, |
|
1655 | 1655 | 'enable_locking': repo.enable_locking, |
|
1656 | 1656 | 'enable_downloads': repo.enable_downloads, |
|
1657 | 1657 | 'last_changeset': repo.changeset_cache, |
|
1658 | 1658 | 'locked_by': User.get(_user_id).get_api_data( |
|
1659 | 1659 | include_secrets=include_secrets) if _user_id else None, |
|
1660 | 1660 | 'locked_date': time_to_datetime(_time) if _time else None, |
|
1661 | 1661 | 'lock_reason': _reason if _reason else None, |
|
1662 | 1662 | } |
|
1663 | 1663 | |
|
1664 | 1664 | # TODO: mikhail: should be per-repo settings here |
|
1665 | 1665 | rc_config = SettingsModel().get_all_settings() |
|
1666 | 1666 | repository_fields = str2bool( |
|
1667 | 1667 | rc_config.get('rhodecode_repository_fields')) |
|
1668 | 1668 | if repository_fields: |
|
1669 | 1669 | for f in self.extra_fields: |
|
1670 | 1670 | data[f.field_key_prefixed] = f.field_value |
|
1671 | 1671 | |
|
1672 | 1672 | return data |
|
1673 | 1673 | |
|
1674 | 1674 | @classmethod |
|
1675 | 1675 | def lock(cls, repo, user_id, lock_time=None, lock_reason=None): |
|
1676 | 1676 | if not lock_time: |
|
1677 | 1677 | lock_time = time.time() |
|
1678 | 1678 | if not lock_reason: |
|
1679 | 1679 | lock_reason = cls.LOCK_AUTOMATIC |
|
1680 | 1680 | repo.locked = [user_id, lock_time, lock_reason] |
|
1681 | 1681 | Session().add(repo) |
|
1682 | 1682 | Session().commit() |
|
1683 | 1683 | |
|
1684 | 1684 | @classmethod |
|
1685 | 1685 | def unlock(cls, repo): |
|
1686 | 1686 | repo.locked = None |
|
1687 | 1687 | Session().add(repo) |
|
1688 | 1688 | Session().commit() |
|
1689 | 1689 | |
|
1690 | 1690 | @classmethod |
|
1691 | 1691 | def getlock(cls, repo): |
|
1692 | 1692 | return repo.locked |
|
1693 | 1693 | |
|
1694 | 1694 | def is_user_lock(self, user_id): |
|
1695 | 1695 | if self.locked[0]:

1696 | 1696 | lock_user_id = safe_int(self.locked[0])
|
1697 | 1697 | user_id = safe_int(user_id) |
|
1698 | 1698 | # both are ints, and they are equal |
|
1699 | 1699 | return all([lock_user_id, user_id]) and lock_user_id == user_id |
|
1700 | 1700 | |
|
1701 | 1701 | return False |
|
1702 | 1702 | |
|
1703 | 1703 | def get_locking_state(self, action, user_id, only_when_enabled=True): |
|
1704 | 1704 | """ |
|
1705 | 1705 | Checks locking on this repository, if locking is enabled and lock is |
|
1706 | 1706 | present returns a tuple of make_lock, locked, locked_by. |
|
1707 | 1707 | make_lock can have 3 states: None (do nothing), True (make lock) and

1708 | 1708 | False (release lock). This value is later propagated to hooks, which

1709 | 1709 | do the locking. Think of it as signals passed to hooks about what to do.
|
1710 | 1710 | |
|
1711 | 1711 | """ |
|
1712 | 1712 | # TODO: johbo: This is part of the business logic and should be moved |
|
1713 | 1713 | # into the RepositoryModel. |
|
1714 | 1714 | |
|
1715 | 1715 | if action not in ('push', 'pull'): |
|
1716 | 1716 | raise ValueError("Invalid action value: %s" % repr(action)) |
|
1717 | 1717 | |
|
1718 | 1718 | # defines if locked error should be thrown to user |
|
1719 | 1719 | currently_locked = False |
|
1720 | 1720 | # defines if new lock should be made, tri-state |
|
1721 | 1721 | make_lock = None |
|
1722 | 1722 | repo = self |
|
1723 | 1723 | user = User.get(user_id) |
|
1724 | 1724 | |
|
1725 | 1725 | lock_info = repo.locked |
|
1726 | 1726 | |
|
1727 | 1727 | if repo and (repo.enable_locking or not only_when_enabled): |
|
1728 | 1728 | if action == 'push': |
|
1729 | 1729 | # check if it's already locked; if it is, compare users
|
1730 | 1730 | locked_by_user_id = lock_info[0] |
|
1731 | 1731 | if user.user_id == locked_by_user_id: |
|
1732 | 1732 | log.debug( |
|
1733 | 1733 | 'Got `push` action from user %s, now unlocking', user) |
|
1734 | 1734 | # unlock if we have push from user who locked |
|
1735 | 1735 | make_lock = False |
|
1736 | 1736 | else: |
|
1737 | 1737 | # we're not the same user who locked, ban with |
|
1738 | 1738 | # code defined in settings (default is 423 HTTP Locked) ! |
|
1739 | 1739 | log.debug('Repo %s is currently locked by %s', repo, user) |
|
1740 | 1740 | currently_locked = True |
|
1741 | 1741 | elif action == 'pull': |
|
1742 | 1742 | # [0] user [1] date |
|
1743 | 1743 | if lock_info[0] and lock_info[1]: |
|
1744 | 1744 | log.debug('Repo %s is currently locked by %s', repo, user) |
|
1745 | 1745 | currently_locked = True |
|
1746 | 1746 | else: |
|
1747 | 1747 | log.debug('Setting lock on repo %s by %s', repo, user) |
|
1748 | 1748 | make_lock = True |
|
1749 | 1749 | |
|
1750 | 1750 | else: |
|
1751 | 1751 | log.debug('Repository %s does not have locking enabled', repo)
|
1752 | 1752 | |
|
1753 | 1753 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', |
|
1754 | 1754 | make_lock, currently_locked, lock_info) |
|
1755 | 1755 | |
|
1756 | 1756 | from rhodecode.lib.auth import HasRepoPermissionAny |
|
1757 | 1757 | perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') |
|
1758 | 1758 | if make_lock and not perm_check(repo_name=repo.repo_name, user=user): |
|
1759 | 1759 | # if we don't have at least write permission we cannot make a lock |
|
1760 | 1760 | log.debug('lock state reset back to FALSE due to lack ' |
|
1761 | 1761 | 'of at least write permission')
|
1762 | 1762 | make_lock = False |
|
1763 | 1763 | |
|
1764 | 1764 | return make_lock, currently_locked, lock_info |
|
1765 | 1765 | |
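# Hedged sketch of how the tri-state result is meant to be consumed; the
# flow below is an illustration, not code from this file:
#
#   make_lock, locked, locked_by = repo.get_locking_state('push', user.user_id)
#   if locked:
#       pass  # reject the operation (HTTP 423 Locked by default)
#   elif make_lock is True:
#       Repository.lock(repo, user.user_id)
#   elif make_lock is False:
#       Repository.unlock(repo)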
|
1766 | 1766 | @property |
|
1767 | 1767 | def last_db_change(self): |
|
1768 | 1768 | return self.updated_on |
|
1769 | 1769 | |
|
1770 | 1770 | @property |
|
1771 | 1771 | def clone_uri_hidden(self): |
|
1772 | 1772 | clone_uri = self.clone_uri |
|
1773 | 1773 | if clone_uri: |
|
1774 | 1774 | import urlobject |
|
1775 | 1775 | url_obj = urlobject.URLObject(clone_uri) |
|
1776 | 1776 | if url_obj.password: |
|
1777 | 1777 | clone_uri = url_obj.with_password('*****') |
|
1778 | 1778 | return clone_uri |
|
1779 | 1779 | |
|
1780 | 1780 | def clone_url(self, **override): |
|
1781 | 1781 | qualified_home_url = url('home', qualified=True) |
|
1782 | 1782 | |
|
1783 | 1783 | uri_tmpl = None |
|
1784 | 1784 | if 'with_id' in override: |
|
1785 | 1785 | uri_tmpl = self.DEFAULT_CLONE_URI_ID |
|
1786 | 1786 | del override['with_id'] |
|
1787 | 1787 | |
|
1788 | 1788 | if 'uri_tmpl' in override: |
|
1789 | 1789 | uri_tmpl = override['uri_tmpl'] |
|
1790 | 1790 | del override['uri_tmpl'] |
|
1791 | 1791 | |
|
1792 | 1792 | # we didn't override our tmpl from **overrides |
|
1793 | 1793 | if not uri_tmpl: |
|
1794 | 1794 | uri_tmpl = self.DEFAULT_CLONE_URI |
|
1795 | 1795 | try: |
|
1796 | 1796 | from pylons import tmpl_context as c |
|
1797 | 1797 | uri_tmpl = c.clone_uri_tmpl |
|
1798 | 1798 | except Exception: |
|
1799 | 1799 | # in any case if we call this outside of request context, |
|
1800 | 1800 | # ie, not having tmpl_context set up |
|
1801 | 1801 | pass |
|
1802 | 1802 | |
|
1803 | 1803 | return get_clone_url(uri_tmpl=uri_tmpl, |
|
1804 | 1804 | qualifed_home_url=qualified_home_url, |
|
1805 | 1805 | repo_name=self.repo_name, |
|
1806 | 1806 | repo_id=self.repo_id, **override) |
|
1807 | 1807 | |
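# Usage sketch for clone_url; the rendered URLs are illustrative only, the real
# output depends on the configured clone_uri_tmpl and the request context:
#
#   repo.clone_url()                 # e.g. 'http://user@host/group/repo'
#   repo.clone_url(with_id=True)     # e.g. 'http://user@host/_42', addressed by repo id
#   repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')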
|
1808 | 1808 | def set_state(self, state): |
|
1809 | 1809 | self.repo_state = state |
|
1810 | 1810 | Session().add(self) |
|
1811 | 1811 | #========================================================================== |
|
1812 | 1812 | # SCM PROPERTIES |
|
1813 | 1813 | #========================================================================== |
|
1814 | 1814 | |
|
1815 | 1815 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
1816 | 1816 | return get_commit_safe( |
|
1817 | 1817 | self.scm_instance(), commit_id, commit_idx, pre_load=pre_load) |
|
1818 | 1818 | |
|
1819 | 1819 | def get_changeset(self, rev=None, pre_load=None): |
|
1820 | 1820 | warnings.warn("Use get_commit", DeprecationWarning) |
|
1821 | 1821 | commit_id = None |
|
1822 | 1822 | commit_idx = None |
|
1823 | 1823 | if isinstance(rev, basestring): |
|
1824 | 1824 | commit_id = rev |
|
1825 | 1825 | else: |
|
1826 | 1826 | commit_idx = rev |
|
1827 | 1827 | return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, |
|
1828 | 1828 | pre_load=pre_load) |
|
1829 | 1829 | |
|
1830 | 1830 | def get_landing_commit(self): |
|
1831 | 1831 | """ |
|
1832 | 1832 | Returns landing commit, or if that doesn't exist returns the tip |
|
1833 | 1833 | """ |
|
1834 | 1834 | _rev_type, _rev = self.landing_rev |
|
1835 | 1835 | commit = self.get_commit(_rev) |
|
1836 | 1836 | if isinstance(commit, EmptyCommit): |
|
1837 | 1837 | return self.get_commit() |
|
1838 | 1838 | return commit |
|
1839 | 1839 | |
|
1840 | 1840 | def update_commit_cache(self, cs_cache=None, config=None): |
|
1841 | 1841 | """ |
|
1842 | 1842 | Update cache of last changeset for repository, keys should be:: |
|
1843 | 1843 | |
|
1844 | 1844 | short_id |
|
1845 | 1845 | raw_id |
|
1846 | 1846 | revision |
|
1847 | 1847 | parents |
|
1848 | 1848 | message |
|
1849 | 1849 | date |
|
1850 | 1850 | author |
|
1851 | 1851 | |
|
1852 | 1852 | :param cs_cache: |
|
1853 | 1853 | """ |
|
1854 | 1854 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
1855 | 1855 | if cs_cache is None: |
|
1856 | 1856 | # use no-cache version here |
|
1857 | 1857 | scm_repo = self.scm_instance(cache=False, config=config) |
|
1858 | 1858 | if scm_repo: |
|
1859 | 1859 | cs_cache = scm_repo.get_commit( |
|
1860 | 1860 | pre_load=["author", "date", "message", "parents"]) |
|
1861 | 1861 | else: |
|
1862 | 1862 | cs_cache = EmptyCommit() |
|
1863 | 1863 | |
|
1864 | 1864 | if isinstance(cs_cache, BaseChangeset): |
|
1865 | 1865 | cs_cache = cs_cache.__json__() |
|
1866 | 1866 | |
|
1867 | 1867 | def is_outdated(new_cs_cache): |
|
1868 | 1868 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or |
|
1869 | 1869 | new_cs_cache['revision'] != self.changeset_cache['revision']): |
|
1870 | 1870 | return True |
|
1871 | 1871 | return False |
|
1872 | 1872 | |
|
1873 | 1873 | # check whether we maybe already have the latest cached revision
|
1874 | 1874 | if is_outdated(cs_cache) or not self.changeset_cache: |
|
1875 | 1875 | _default = datetime.datetime.fromtimestamp(0) |
|
1876 | 1876 | last_change = cs_cache.get('date') or _default |
|
1877 | 1877 | log.debug('updated repo %s with new cs cache %s', |
|
1878 | 1878 | self.repo_name, cs_cache) |
|
1879 | 1879 | self.updated_on = last_change |
|
1880 | 1880 | self.changeset_cache = cs_cache |
|
1881 | 1881 | Session().add(self) |
|
1882 | 1882 | Session().commit() |
|
1883 | 1883 | else: |
|
1884 | 1884 | log.debug('Skipping update_commit_cache for repo:`%s` ' |
|
1885 | 1885 | 'commit already with latest changes', self.repo_name) |
|
1886 | 1886 | |
|
1887 | 1887 | @property |
|
1888 | 1888 | def tip(self): |
|
1889 | 1889 | return self.get_commit('tip') |
|
1890 | 1890 | |
|
1891 | 1891 | @property |
|
1892 | 1892 | def author(self): |
|
1893 | 1893 | return self.tip.author |
|
1894 | 1894 | |
|
1895 | 1895 | @property |
|
1896 | 1896 | def last_change(self): |
|
1897 | 1897 | return self.scm_instance().last_change |
|
1898 | 1898 | |
|
1899 | 1899 | def get_comments(self, revisions=None): |
|
1900 | 1900 | """ |
|
1901 | 1901 | Returns comments for this repository grouped by revisions |
|
1902 | 1902 | |
|
1903 | 1903 | :param revisions: filter query by revisions only |
|
1904 | 1904 | """ |
|
1905 | 1905 | cmts = ChangesetComment.query()\ |
|
1906 | 1906 | .filter(ChangesetComment.repo == self) |
|
1907 | 1907 | if revisions: |
|
1908 | 1908 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
1909 | 1909 | grouped = collections.defaultdict(list) |
|
1910 | 1910 | for cmt in cmts.all(): |
|
1911 | 1911 | grouped[cmt.revision].append(cmt) |
|
1912 | 1912 | return grouped |
|
1913 | 1913 | |
|
1914 | 1914 | def statuses(self, revisions=None): |
|
1915 | 1915 | """ |
|
1916 | 1916 | Returns statuses for this repository |
|
1917 | 1917 | |
|
1918 | 1918 | :param revisions: list of revisions to get statuses for |
|
1919 | 1919 | """ |
|
1920 | 1920 | statuses = ChangesetStatus.query()\ |
|
1921 | 1921 | .filter(ChangesetStatus.repo == self)\ |
|
1922 | 1922 | .filter(ChangesetStatus.version == 0) |
|
1923 | 1923 | |
|
1924 | 1924 | if revisions: |
|
1925 | 1925 | # Try doing the filtering in chunks to avoid hitting limits |
|
1926 | 1926 | size = 500 |
|
1927 | 1927 | status_results = [] |
|
1928 | 1928 | for chunk in xrange(0, len(revisions), size): |
|
1929 | 1929 | status_results += statuses.filter( |
|
1930 | 1930 | ChangesetStatus.revision.in_( |
|
1931 | 1931 | revisions[chunk: chunk+size]) |
|
1932 | 1932 | ).all() |
|
1933 | 1933 | else: |
|
1934 | 1934 | status_results = statuses.all() |
|
1935 | 1935 | |
|
1936 | 1936 | grouped = {} |
|
1937 | 1937 | |
|
1938 | 1938 | # maybe we have an open new pull request without a status?
|
1939 | 1939 | stat = ChangesetStatus.STATUS_UNDER_REVIEW |
|
1940 | 1940 | status_lbl = ChangesetStatus.get_status_lbl(stat) |
|
1941 | 1941 | for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): |
|
1942 | 1942 | for rev in pr.revisions: |
|
1943 | 1943 | pr_id = pr.pull_request_id |
|
1944 | 1944 | pr_repo = pr.target_repo.repo_name |
|
1945 | 1945 | grouped[rev] = [stat, status_lbl, pr_id, pr_repo] |
|
1946 | 1946 | |
|
1947 | 1947 | for stat in status_results: |
|
1948 | 1948 | pr_id = pr_repo = None |
|
1949 | 1949 | if stat.pull_request: |
|
1950 | 1950 | pr_id = stat.pull_request.pull_request_id |
|
1951 | 1951 | pr_repo = stat.pull_request.target_repo.repo_name |
|
1952 | 1952 | grouped[stat.revision] = [str(stat.status), stat.status_lbl, |
|
1953 | 1953 | pr_id, pr_repo] |
|
1954 | 1954 | return grouped |
|
1955 | 1955 | |
|
1956 | 1956 | # ========================================================================== |
|
1957 | 1957 | # SCM CACHE INSTANCE |
|
1958 | 1958 | # ========================================================================== |
|
1959 | 1959 | |
|
1960 | 1960 | def scm_instance(self, **kwargs): |
|
1961 | 1961 | import rhodecode |
|
1962 | 1962 | |
|
1963 | 1963 | # Passing a config will not hit the cache; currently this is only used

1964 | 1964 | # for repo2dbmapper
|
1965 | 1965 | config = kwargs.pop('config', None) |
|
1966 | 1966 | cache = kwargs.pop('cache', None) |
|
1967 | 1967 | full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) |
|
1968 | 1968 | # if cache is NOT defined use the global default, else we have full

1969 | 1969 | # control over the cache behaviour
|
1970 | 1970 | if cache is None and full_cache and not config: |
|
1971 | 1971 | return self._get_instance_cached() |
|
1972 | 1972 | return self._get_instance(cache=bool(cache), config=config) |
|
1973 | 1973 | |
|
1974 | 1974 | def _get_instance_cached(self): |
|
1975 | 1975 | @cache_region('long_term') |
|
1976 | 1976 | def _get_repo(cache_key): |
|
1977 | 1977 | return self._get_instance() |
|
1978 | 1978 | |
|
1979 | 1979 | invalidator_context = CacheKey.repo_context_cache( |
|
1980 | 1980 | _get_repo, self.repo_name, None, thread_scoped=True) |
|
1981 | 1981 | |
|
1982 | 1982 | with invalidator_context as context: |
|
1983 | 1983 | context.invalidate() |
|
1984 | 1984 | repo = context.compute() |
|
1985 | 1985 | |
|
1986 | 1986 | return repo |
|
1987 | 1987 | |
|
1988 | 1988 | def _get_instance(self, cache=True, config=None): |
|
1989 | 1989 | config = config or self._config |
|
1990 | 1990 | custom_wire = { |
|
1991 | 1991 | 'cache': cache # controls the vcs.remote cache |
|
1992 | 1992 | } |
|
1993 | 1993 | |
|
1994 | 1994 | repo = get_vcs_instance( |
|
1995 | 1995 | repo_path=safe_str(self.repo_full_path), |
|
1996 | 1996 | config=config, |
|
1997 | 1997 | with_wire=custom_wire, |
|
1998 | 1998 | create=False) |
|
1999 | 1999 | |
|
2000 | 2000 | return repo |
|
2001 | 2001 | |
|
2002 | 2002 | def __json__(self): |
|
2003 | 2003 | return {'landing_rev': self.landing_rev} |
|
2004 | 2004 | |
|
2005 | 2005 | def get_dict(self): |
|
2006 | 2006 | |
|
2007 | 2007 | # Since we transformed `repo_name` to a hybrid property, we need to |
|
2008 | 2008 | # keep compatibility with the code which uses `repo_name` field. |
|
2009 | 2009 | |
|
2010 | 2010 | result = super(Repository, self).get_dict() |
|
2011 | 2011 | result['repo_name'] = result.pop('_repo_name', None) |
|
2012 | 2012 | return result |
|
2013 | 2013 | |
|
2014 | 2014 | |
|
2015 | 2015 | class RepoGroup(Base, BaseModel): |
|
2016 | 2016 | __tablename__ = 'groups' |
|
2017 | 2017 | __table_args__ = ( |
|
2018 | 2018 | UniqueConstraint('group_name', 'group_parent_id'), |
|
2019 | 2019 | CheckConstraint('group_id != group_parent_id'), |
|
2020 | 2020 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2021 | 2021 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2022 | 2022 | ) |
|
2023 | 2023 | __mapper_args__ = {'order_by': 'group_name'} |
|
2024 | 2024 | |
|
2025 | 2025 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups |
|
2026 | 2026 | |
|
2027 | 2027 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2028 | 2028 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
2029 | 2029 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
2030 | 2030 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
2031 | 2031 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
2032 | 2032 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
2033 | 2033 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
2034 | 2034 | |
|
2035 | 2035 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') |
|
2036 | 2036 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
2037 | 2037 | parent_group = relationship('RepoGroup', remote_side=group_id) |
|
2038 | 2038 | user = relationship('User') |
|
2039 | 2039 | integrations = relationship('Integration', |
|
2040 | 2040 | cascade="all, delete, delete-orphan") |
|
2041 | 2041 | |
|
2042 | 2042 | def __init__(self, group_name='', parent_group=None): |
|
2043 | 2043 | self.group_name = group_name |
|
2044 | 2044 | self.parent_group = parent_group |
|
2045 | 2045 | |
|
2046 | 2046 | def __unicode__(self): |
|
2047 | 2047 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
2048 | 2048 | self.group_name) |
|
2049 | 2049 | |
|
2050 | 2050 | @classmethod |
|
2051 | 2051 | def _generate_choice(cls, repo_group): |
|
2052 | 2052 | from webhelpers.html import literal as _literal |
|
2053 | 2053 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) |
|
2054 | 2054 | return repo_group.group_id, _name(repo_group.full_path_splitted) |
|
2055 | 2055 | |
|
2056 | 2056 | @classmethod |
|
2057 | 2057 | def groups_choices(cls, groups=None, show_empty_group=True): |
|
2058 | 2058 | if not groups: |
|
2059 | 2059 | groups = cls.query().all() |
|
2060 | 2060 | |
|
2061 | 2061 | repo_groups = [] |
|
2062 | 2062 | if show_empty_group: |
|
2063 | 2063 | repo_groups = [('-1', u'-- %s --' % _('No parent'))] |
|
2064 | 2064 | |
|
2065 | 2065 | repo_groups.extend([cls._generate_choice(x) for x in groups]) |
|
2066 | 2066 | |
|
2067 | 2067 | repo_groups = sorted( |
|
2068 | 2068 | repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) |
|
2069 | 2069 | return repo_groups |
|
2070 | 2070 | |
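# Illustrative sketch, not part of the original source: groups_choices above
# builds (group_id, label) pairs for select2, where the label is the
# '/'-joined group path and '-1' is the sentinel for "No parent". The group
# names and ids below are hypothetical.
#
#     RepoGroup.groups_choices(show_empty_group=True)
#     # -> [('-1', u'-- No parent --'), (3, u'projects'), (7, u'projects/backend')]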
|
2071 | 2071 | @classmethod |
|
2072 | 2072 | def url_sep(cls): |
|
2073 | 2073 | return URL_SEP |
|
2074 | 2074 | |
|
2075 | 2075 | @classmethod |
|
2076 | 2076 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
2077 | 2077 | if case_insensitive: |
|
2078 | 2078 | gr = cls.query().filter(func.lower(cls.group_name) |
|
2079 | 2079 | == func.lower(group_name)) |
|
2080 | 2080 | else: |
|
2081 | 2081 | gr = cls.query().filter(cls.group_name == group_name) |
|
2082 | 2082 | if cache: |
|
2083 | 2083 | gr = gr.options(FromCache( |
|
2084 | 2084 | "sql_cache_short", |
|
2085 | 2085 | "get_group_%s" % _hash_key(group_name))) |
|
2086 | 2086 | return gr.scalar() |
|
2087 | 2087 | |
|
2088 | 2088 | @classmethod |
|
2089 | 2089 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), |
|
2090 | 2090 | case_insensitive=True): |
|
2091 | 2091 | q = RepoGroup.query() |
|
2092 | 2092 | |
|
2093 | 2093 | if not isinstance(user_id, Optional): |
|
2094 | 2094 | q = q.filter(RepoGroup.user_id == user_id) |
|
2095 | 2095 | |
|
2096 | 2096 | if not isinstance(group_id, Optional): |
|
2097 | 2097 | q = q.filter(RepoGroup.group_parent_id == group_id) |
|
2098 | 2098 | |
|
2099 | 2099 | if case_insensitive: |
|
2100 | 2100 | q = q.order_by(func.lower(RepoGroup.group_name)) |
|
2101 | 2101 | else: |
|
2102 | 2102 | q = q.order_by(RepoGroup.group_name) |
|
2103 | 2103 | return q.all() |
|
2104 | 2104 | |
|
2105 | 2105 | @property |
|
2106 | 2106 | def parents(self): |
|
2107 | 2107 | parents_recursion_limit = 10 |
|
2108 | 2108 | groups = [] |
|
2109 | 2109 | if self.parent_group is None: |
|
2110 | 2110 | return groups |
|
2111 | 2111 | cur_gr = self.parent_group |
|
2112 | 2112 | groups.insert(0, cur_gr) |
|
2113 | 2113 | cnt = 0 |
|
2114 | 2114 | while 1: |
|
2115 | 2115 | cnt += 1 |
|
2116 | 2116 | gr = getattr(cur_gr, 'parent_group', None) |
|
2117 | 2117 | cur_gr = cur_gr.parent_group |
|
2118 | 2118 | if gr is None: |
|
2119 | 2119 | break |
|
2120 | 2120 | if cnt == parents_recursion_limit: |
|
2121 | 2121 | # this will prevent accidental infinite loops
|
2122 | 2122 | log.error(('more than %s parents found for group %s, stopping ' |
|
2123 | 2123 | 'recursive parent fetching' % (parents_recursion_limit, self))) |
|
2124 | 2124 | break |
|
2125 | 2125 | |
|
2126 | 2126 | groups.insert(0, gr) |
|
2127 | 2127 | return groups |
|
2128 | 2128 | |
|
2129 | 2129 | @property |
|
2130 | 2130 | def children(self): |
|
2131 | 2131 | return RepoGroup.query().filter(RepoGroup.parent_group == self) |
|
2132 | 2132 | |
|
2133 | 2133 | @property |
|
2134 | 2134 | def name(self): |
|
2135 | 2135 | return self.group_name.split(RepoGroup.url_sep())[-1] |
|
2136 | 2136 | |
|
2137 | 2137 | @property |
|
2138 | 2138 | def full_path(self): |
|
2139 | 2139 | return self.group_name |
|
2140 | 2140 | |
|
2141 | 2141 | @property |
|
2142 | 2142 | def full_path_splitted(self): |
|
2143 | 2143 | return self.group_name.split(RepoGroup.url_sep()) |
|
2144 | 2144 | |
|
2145 | 2145 | @property |
|
2146 | 2146 | def repositories(self): |
|
2147 | 2147 | return Repository.query()\ |
|
2148 | 2148 | .filter(Repository.group == self)\ |
|
2149 | 2149 | .order_by(Repository.repo_name) |
|
2150 | 2150 | |
|
2151 | 2151 | @property |
|
2152 | 2152 | def repositories_recursive_count(self): |
|
2153 | 2153 | cnt = self.repositories.count() |
|
2154 | 2154 | |
|
2155 | 2155 | def children_count(group): |
|
2156 | 2156 | cnt = 0 |
|
2157 | 2157 | for child in group.children: |
|
2158 | 2158 | cnt += child.repositories.count() |
|
2159 | 2159 | cnt += children_count(child) |
|
2160 | 2160 | return cnt |
|
2161 | 2161 | |
|
2162 | 2162 | return cnt + children_count(self) |
|
2163 | 2163 | |
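# Illustrative note, not part of the original source: repositories_recursive_count
# sums this group's direct repositories with those of every descendant group,
# e.g. a group with 2 direct repos and one child group holding 3 repos reports 5.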
|
2164 | 2164 | def _recursive_objects(self, include_repos=True): |
|
2165 | 2165 | all_ = [] |
|
2166 | 2166 | |
|
2167 | 2167 | def _get_members(root_gr): |
|
2168 | 2168 | if include_repos: |
|
2169 | 2169 | for r in root_gr.repositories: |
|
2170 | 2170 | all_.append(r) |
|
2171 | 2171 | childs = root_gr.children.all() |
|
2172 | 2172 | if childs: |
|
2173 | 2173 | for gr in childs: |
|
2174 | 2174 | all_.append(gr) |
|
2175 | 2175 | _get_members(gr) |
|
2176 | 2176 | |
|
2177 | 2177 | _get_members(self) |
|
2178 | 2178 | return [self] + all_ |
|
2179 | 2179 | |
|
2180 | 2180 | def recursive_groups_and_repos(self): |
|
2181 | 2181 | """ |
|
2182 | 2182 | Recursively return all groups, with repositories in those groups
|
2183 | 2183 | """ |
|
2184 | 2184 | return self._recursive_objects() |
|
2185 | 2185 | |
|
2186 | 2186 | def recursive_groups(self): |
|
2187 | 2187 | """ |
|
2188 | 2188 | Returns all child groups of this group, including children of children
|
2189 | 2189 | """ |
|
2190 | 2190 | return self._recursive_objects(include_repos=False) |
|
2191 | 2191 | |
|
2192 | 2192 | def get_new_name(self, group_name): |
|
2193 | 2193 | """ |
|
2194 | 2194 | Returns the new full group name based on the parent group and the new name.
|
2195 | 2195 | |
|
2196 | 2196 | :param group_name: |
|
2197 | 2197 | """ |
|
2198 | 2198 | path_prefix = (self.parent_group.full_path_splitted if |
|
2199 | 2199 | self.parent_group else []) |
|
2200 | 2200 | return RepoGroup.url_sep().join(path_prefix + [group_name]) |
|
2201 | 2201 | |
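# Illustrative sketch, not part of the original source: get_new_name joins the
# parent group's path with the new leaf name using the URL separator (assumed
# here to be '/'). The group names below are hypothetical.
#
#     parent = RepoGroup(group_name=u'projects/backend')
#     child = RepoGroup(group_name=u'projects/backend/old-api', parent_group=parent)
#     child.get_new_name(u'api')   # -> u'projects/backend/api'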
|
2202 | 2202 | def permissions(self, with_admins=True, with_owner=True): |
|
2203 | 2203 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) |
|
2204 | 2204 | q = q.options(joinedload(UserRepoGroupToPerm.group), |
|
2205 | 2205 | joinedload(UserRepoGroupToPerm.user), |
|
2206 | 2206 | joinedload(UserRepoGroupToPerm.permission),) |
|
2207 | 2207 | |
|
2208 | 2208 | # get owners, admins and their permissions. We re-write the sqlalchemy

2209 | 2209 | # objects as named-tuples because the sqlalchemy session keeps a global

2210 | 2210 | # reference, so changing one object would propagate to all others.

2211 | 2211 | # This means that if an admin is also the owner, a change to admin_row

2212 | 2212 | # would propagate to both objects.
|
2213 | 2213 | perm_rows = [] |
|
2214 | 2214 | for _usr in q.all(): |
|
2215 | 2215 | usr = AttributeDict(_usr.user.get_dict()) |
|
2216 | 2216 | usr.permission = _usr.permission.permission_name |
|
2217 | 2217 | perm_rows.append(usr) |
|
2218 | 2218 | |
|
2219 | 2219 | # filter the perm rows by 'default' first and then sort them by |
|
2220 | 2220 | # admin,write,read,none permissions sorted again alphabetically in |
|
2221 | 2221 | # each group |
|
2222 | 2222 | perm_rows = sorted(perm_rows, key=display_sort) |
|
2223 | 2223 | |
|
2224 | 2224 | _admin_perm = 'group.admin' |
|
2225 | 2225 | owner_row = [] |
|
2226 | 2226 | if with_owner: |
|
2227 | 2227 | usr = AttributeDict(self.user.get_dict()) |
|
2228 | 2228 | usr.owner_row = True |
|
2229 | 2229 | usr.permission = _admin_perm |
|
2230 | 2230 | owner_row.append(usr) |
|
2231 | 2231 | |
|
2232 | 2232 | super_admin_rows = [] |
|
2233 | 2233 | if with_admins: |
|
2234 | 2234 | for usr in User.get_all_super_admins(): |
|
2235 | 2235 | # if this admin is also owner, don't double the record |
|
2236 | 2236 | if usr.user_id == owner_row[0].user_id: |
|
2237 | 2237 | owner_row[0].admin_row = True |
|
2238 | 2238 | else: |
|
2239 | 2239 | usr = AttributeDict(usr.get_dict()) |
|
2240 | 2240 | usr.admin_row = True |
|
2241 | 2241 | usr.permission = _admin_perm |
|
2242 | 2242 | super_admin_rows.append(usr) |
|
2243 | 2243 | |
|
2244 | 2244 | return super_admin_rows + owner_row + perm_rows |
|
2245 | 2245 | |
|
2246 | 2246 | def permission_user_groups(self): |
|
2247 | 2247 | q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self) |
|
2248 | 2248 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), |
|
2249 | 2249 | joinedload(UserGroupRepoGroupToPerm.users_group), |
|
2250 | 2250 | joinedload(UserGroupRepoGroupToPerm.permission),) |
|
2251 | 2251 | |
|
2252 | 2252 | perm_rows = [] |
|
2253 | 2253 | for _user_group in q.all(): |
|
2254 | 2254 | usr = AttributeDict(_user_group.users_group.get_dict()) |
|
2255 | 2255 | usr.permission = _user_group.permission.permission_name |
|
2256 | 2256 | perm_rows.append(usr) |
|
2257 | 2257 | |
|
2258 | 2258 | return perm_rows |
|
2259 | 2259 | |
|
2260 | 2260 | def get_api_data(self): |
|
2261 | 2261 | """ |
|
2262 | 2262 | Common function for generating api data |
|
2263 | 2263 | |
|
2264 | 2264 | """ |
|
2265 | 2265 | group = self |
|
2266 | 2266 | data = { |
|
2267 | 2267 | 'group_id': group.group_id, |
|
2268 | 2268 | 'group_name': group.group_name, |
|
2269 | 2269 | 'group_description': group.group_description, |
|
2270 | 2270 | 'parent_group': group.parent_group.group_name if group.parent_group else None, |
|
2271 | 2271 | 'repositories': [x.repo_name for x in group.repositories], |
|
2272 | 2272 | 'owner': group.user.username, |
|
2273 | 2273 | } |
|
2274 | 2274 | return data |
|
2275 | 2275 | |
|
2276 | 2276 | |
|
2277 | 2277 | class Permission(Base, BaseModel): |
|
2278 | 2278 | __tablename__ = 'permissions' |
|
2279 | 2279 | __table_args__ = ( |
|
2280 | 2280 | Index('p_perm_name_idx', 'permission_name'), |
|
2281 | 2281 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2282 | 2282 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2283 | 2283 | ) |
|
2284 | 2284 | PERMS = [ |
|
2285 | 2285 | ('hg.admin', _('RhodeCode Super Administrator')), |
|
2286 | 2286 | |
|
2287 | 2287 | ('repository.none', _('Repository no access')), |
|
2288 | 2288 | ('repository.read', _('Repository read access')), |
|
2289 | 2289 | ('repository.write', _('Repository write access')), |
|
2290 | 2290 | ('repository.admin', _('Repository admin access')), |
|
2291 | 2291 | |
|
2292 | 2292 | ('group.none', _('Repository group no access')), |
|
2293 | 2293 | ('group.read', _('Repository group read access')), |
|
2294 | 2294 | ('group.write', _('Repository group write access')), |
|
2295 | 2295 | ('group.admin', _('Repository group admin access')), |
|
2296 | 2296 | |
|
2297 | 2297 | ('usergroup.none', _('User group no access')), |
|
2298 | 2298 | ('usergroup.read', _('User group read access')), |
|
2299 | 2299 | ('usergroup.write', _('User group write access')), |
|
2300 | 2300 | ('usergroup.admin', _('User group admin access')), |
|
2301 | 2301 | |
|
2302 | 2302 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), |
|
2303 | 2303 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), |
|
2304 | 2304 | |
|
2305 | 2305 | ('hg.usergroup.create.false', _('User Group creation disabled')), |
|
2306 | 2306 | ('hg.usergroup.create.true', _('User Group creation enabled')), |
|
2307 | 2307 | |
|
2308 | 2308 | ('hg.create.none', _('Repository creation disabled')), |
|
2309 | 2309 | ('hg.create.repository', _('Repository creation enabled')), |
|
2310 | 2310 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), |
|
2311 | 2311 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), |
|
2312 | 2312 | |
|
2313 | 2313 | ('hg.fork.none', _('Repository forking disabled')), |
|
2314 | 2314 | ('hg.fork.repository', _('Repository forking enabled')), |
|
2315 | 2315 | |
|
2316 | 2316 | ('hg.register.none', _('Registration disabled')), |
|
2317 | 2317 | ('hg.register.manual_activate', _('User Registration with manual account activation')), |
|
2318 | 2318 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), |
|
2319 | 2319 | |
|
2320 | 2320 | ('hg.extern_activate.manual', _('Manual activation of external account')), |
|
2321 | 2321 | ('hg.extern_activate.auto', _('Automatic activation of external account')), |
|
2322 | 2322 | |
|
2323 | 2323 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), |
|
2324 | 2324 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), |
|
2325 | 2325 | ] |
|
2326 | 2326 | |
|
2327 | 2327 | # definition of system default permissions for DEFAULT user |
|
2328 | 2328 | DEFAULT_USER_PERMISSIONS = [ |
|
2329 | 2329 | 'repository.read', |
|
2330 | 2330 | 'group.read', |
|
2331 | 2331 | 'usergroup.read', |
|
2332 | 2332 | 'hg.create.repository', |
|
2333 | 2333 | 'hg.repogroup.create.false', |
|
2334 | 2334 | 'hg.usergroup.create.false', |
|
2335 | 2335 | 'hg.create.write_on_repogroup.true', |
|
2336 | 2336 | 'hg.fork.repository', |
|
2337 | 2337 | 'hg.register.manual_activate', |
|
2338 | 2338 | 'hg.extern_activate.auto', |
|
2339 | 2339 | 'hg.inherit_default_perms.true', |
|
2340 | 2340 | ] |
|
2341 | 2341 | |
|
2342 | 2342 | # Weight defines which permissions are more important.

2343 | 2343 | # The higher the number, the more important the permission.

2344 | 2344 |
|
2345 | 2345 | PERM_WEIGHTS = { |
|
2346 | 2346 | 'repository.none': 0, |
|
2347 | 2347 | 'repository.read': 1, |
|
2348 | 2348 | 'repository.write': 3, |
|
2349 | 2349 | 'repository.admin': 4, |
|
2350 | 2350 | |
|
2351 | 2351 | 'group.none': 0, |
|
2352 | 2352 | 'group.read': 1, |
|
2353 | 2353 | 'group.write': 3, |
|
2354 | 2354 | 'group.admin': 4, |
|
2355 | 2355 | |
|
2356 | 2356 | 'usergroup.none': 0, |
|
2357 | 2357 | 'usergroup.read': 1, |
|
2358 | 2358 | 'usergroup.write': 3, |
|
2359 | 2359 | 'usergroup.admin': 4, |
|
2360 | 2360 | |
|
2361 | 2361 | 'hg.repogroup.create.false': 0, |
|
2362 | 2362 | 'hg.repogroup.create.true': 1, |
|
2363 | 2363 | |
|
2364 | 2364 | 'hg.usergroup.create.false': 0, |
|
2365 | 2365 | 'hg.usergroup.create.true': 1, |
|
2366 | 2366 | |
|
2367 | 2367 | 'hg.fork.none': 0, |
|
2368 | 2368 | 'hg.fork.repository': 1, |
|
2369 | 2369 | 'hg.create.none': 0, |
|
2370 | 2370 | 'hg.create.repository': 1 |
|
2371 | 2371 | } |
|
2372 | 2372 | |
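# Illustrative sketch, not part of the original source: PERM_WEIGHTS can be used
# to resolve the strongest of several granted permissions. The helper below is
# hypothetical; the permission names are keys of the dict above.
def strongest_permission(perm_names):
    # the permission with the highest weight wins; unknown names count as 0
    return max(perm_names, key=lambda name: Permission.PERM_WEIGHTS.get(name, 0))

# strongest_permission(['repository.read', 'repository.write'])  # -> 'repository.write'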
|
2373 | 2373 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2374 | 2374 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
2375 | 2375 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
2376 | 2376 | |
|
2377 | 2377 | def __unicode__(self): |
|
2378 | 2378 | return u"<%s('%s:%s')>" % ( |
|
2379 | 2379 | self.__class__.__name__, self.permission_id, self.permission_name |
|
2380 | 2380 | ) |
|
2381 | 2381 | |
|
2382 | 2382 | @classmethod |
|
2383 | 2383 | def get_by_key(cls, key): |
|
2384 | 2384 | return cls.query().filter(cls.permission_name == key).scalar() |
|
2385 | 2385 | |
|
2386 | 2386 | @classmethod |
|
2387 | 2387 | def get_default_repo_perms(cls, user_id, repo_id=None): |
|
2388 | 2388 | q = Session().query(UserRepoToPerm, Repository, Permission)\ |
|
2389 | 2389 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ |
|
2390 | 2390 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
|
2391 | 2391 | .filter(UserRepoToPerm.user_id == user_id) |
|
2392 | 2392 | if repo_id: |
|
2393 | 2393 | q = q.filter(UserRepoToPerm.repository_id == repo_id) |
|
2394 | 2394 | return q.all() |
|
2395 | 2395 | |
|
2396 | 2396 | @classmethod |
|
2397 | 2397 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): |
|
2398 | 2398 | q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ |
|
2399 | 2399 | .join( |
|
2400 | 2400 | Permission, |
|
2401 | 2401 | UserGroupRepoToPerm.permission_id == Permission.permission_id)\ |
|
2402 | 2402 | .join( |
|
2403 | 2403 | Repository, |
|
2404 | 2404 | UserGroupRepoToPerm.repository_id == Repository.repo_id)\ |
|
2405 | 2405 | .join( |
|
2406 | 2406 | UserGroup, |
|
2407 | 2407 | UserGroupRepoToPerm.users_group_id == |
|
2408 | 2408 | UserGroup.users_group_id)\ |
|
2409 | 2409 | .join( |
|
2410 | 2410 | UserGroupMember, |
|
2411 | 2411 | UserGroupRepoToPerm.users_group_id == |
|
2412 | 2412 | UserGroupMember.users_group_id)\ |
|
2413 | 2413 | .filter( |
|
2414 | 2414 | UserGroupMember.user_id == user_id, |
|
2415 | 2415 | UserGroup.users_group_active == true()) |
|
2416 | 2416 | if repo_id: |
|
2417 | 2417 | q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) |
|
2418 | 2418 | return q.all() |
|
2419 | 2419 | |
|
2420 | 2420 | @classmethod |
|
2421 | 2421 | def get_default_group_perms(cls, user_id, repo_group_id=None): |
|
2422 | 2422 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ |
|
2423 | 2423 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ |
|
2424 | 2424 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
2425 | 2425 | .filter(UserRepoGroupToPerm.user_id == user_id) |
|
2426 | 2426 | if repo_group_id: |
|
2427 | 2427 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) |
|
2428 | 2428 | return q.all() |
|
2429 | 2429 | |
|
2430 | 2430 | @classmethod |
|
2431 | 2431 | def get_default_group_perms_from_user_group( |
|
2432 | 2432 | cls, user_id, repo_group_id=None): |
|
2433 | 2433 | q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ |
|
2434 | 2434 | .join( |
|
2435 | 2435 | Permission, |
|
2436 | 2436 | UserGroupRepoGroupToPerm.permission_id == |
|
2437 | 2437 | Permission.permission_id)\ |
|
2438 | 2438 | .join( |
|
2439 | 2439 | RepoGroup, |
|
2440 | 2440 | UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ |
|
2441 | 2441 | .join( |
|
2442 | 2442 | UserGroup, |
|
2443 | 2443 | UserGroupRepoGroupToPerm.users_group_id == |
|
2444 | 2444 | UserGroup.users_group_id)\ |
|
2445 | 2445 | .join( |
|
2446 | 2446 | UserGroupMember, |
|
2447 | 2447 | UserGroupRepoGroupToPerm.users_group_id == |
|
2448 | 2448 | UserGroupMember.users_group_id)\ |
|
2449 | 2449 | .filter( |
|
2450 | 2450 | UserGroupMember.user_id == user_id, |
|
2451 | 2451 | UserGroup.users_group_active == true()) |
|
2452 | 2452 | if repo_group_id: |
|
2453 | 2453 | q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) |
|
2454 | 2454 | return q.all() |
|
2455 | 2455 | |
|
2456 | 2456 | @classmethod |
|
2457 | 2457 | def get_default_user_group_perms(cls, user_id, user_group_id=None): |
|
2458 | 2458 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ |
|
2459 | 2459 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ |
|
2460 | 2460 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ |
|
2461 | 2461 | .filter(UserUserGroupToPerm.user_id == user_id) |
|
2462 | 2462 | if user_group_id: |
|
2463 | 2463 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) |
|
2464 | 2464 | return q.all() |
|
2465 | 2465 | |
|
2466 | 2466 | @classmethod |
|
2467 | 2467 | def get_default_user_group_perms_from_user_group( |
|
2468 | 2468 | cls, user_id, user_group_id=None): |
|
2469 | 2469 | TargetUserGroup = aliased(UserGroup, name='target_user_group') |
|
2470 | 2470 | q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ |
|
2471 | 2471 | .join( |
|
2472 | 2472 | Permission, |
|
2473 | 2473 | UserGroupUserGroupToPerm.permission_id == |
|
2474 | 2474 | Permission.permission_id)\ |
|
2475 | 2475 | .join( |
|
2476 | 2476 | TargetUserGroup, |
|
2477 | 2477 | UserGroupUserGroupToPerm.target_user_group_id == |
|
2478 | 2478 | TargetUserGroup.users_group_id)\ |
|
2479 | 2479 | .join( |
|
2480 | 2480 | UserGroup, |
|
2481 | 2481 | UserGroupUserGroupToPerm.user_group_id == |
|
2482 | 2482 | UserGroup.users_group_id)\ |
|
2483 | 2483 | .join( |
|
2484 | 2484 | UserGroupMember, |
|
2485 | 2485 | UserGroupUserGroupToPerm.user_group_id == |
|
2486 | 2486 | UserGroupMember.users_group_id)\ |
|
2487 | 2487 | .filter( |
|
2488 | 2488 | UserGroupMember.user_id == user_id, |
|
2489 | 2489 | UserGroup.users_group_active == true()) |
|
2490 | 2490 | if user_group_id: |
|
2491 | 2491 | q = q.filter( |
|
2492 | 2492 | UserGroupUserGroupToPerm.user_group_id == user_group_id) |
|
2493 | 2493 | |
|
2494 | 2494 | return q.all() |
|
2495 | 2495 | |
|
2496 | 2496 | |
|
2497 | 2497 | class UserRepoToPerm(Base, BaseModel): |
|
2498 | 2498 | __tablename__ = 'repo_to_perm' |
|
2499 | 2499 | __table_args__ = ( |
|
2500 | 2500 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), |
|
2501 | 2501 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2502 | 2502 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2503 | 2503 | ) |
|
2504 | 2504 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2505 | 2505 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2506 | 2506 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2507 | 2507 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
2508 | 2508 | |
|
2509 | 2509 | user = relationship('User') |
|
2510 | 2510 | repository = relationship('Repository') |
|
2511 | 2511 | permission = relationship('Permission') |
|
2512 | 2512 | |
|
2513 | 2513 | @classmethod |
|
2514 | 2514 | def create(cls, user, repository, permission): |
|
2515 | 2515 | n = cls() |
|
2516 | 2516 | n.user = user |
|
2517 | 2517 | n.repository = repository |
|
2518 | 2518 | n.permission = permission |
|
2519 | 2519 | Session().add(n) |
|
2520 | 2520 | return n |
|
2521 | 2521 | |
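# Illustrative sketch with hypothetical `some_user` / `some_repo` objects: grant
# a user write access to a repository via the create() helper above.
#
#     perm = Permission.get_by_key('repository.write')
#     UserRepoToPerm.create(some_user, some_repo, perm)
#     Session().commit()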
|
2522 | 2522 | def __unicode__(self): |
|
2523 | 2523 | return u'<%s => %s >' % (self.user, self.repository) |
|
2524 | 2524 | |
|
2525 | 2525 | |
|
2526 | 2526 | class UserUserGroupToPerm(Base, BaseModel): |
|
2527 | 2527 | __tablename__ = 'user_user_group_to_perm' |
|
2528 | 2528 | __table_args__ = ( |
|
2529 | 2529 | UniqueConstraint('user_id', 'user_group_id', 'permission_id'), |
|
2530 | 2530 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2531 | 2531 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2532 | 2532 | ) |
|
2533 | 2533 | user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2534 | 2534 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2535 | 2535 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2536 | 2536 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2537 | 2537 | |
|
2538 | 2538 | user = relationship('User') |
|
2539 | 2539 | user_group = relationship('UserGroup') |
|
2540 | 2540 | permission = relationship('Permission') |
|
2541 | 2541 | |
|
2542 | 2542 | @classmethod |
|
2543 | 2543 | def create(cls, user, user_group, permission): |
|
2544 | 2544 | n = cls() |
|
2545 | 2545 | n.user = user |
|
2546 | 2546 | n.user_group = user_group |
|
2547 | 2547 | n.permission = permission |
|
2548 | 2548 | Session().add(n) |
|
2549 | 2549 | return n |
|
2550 | 2550 | |
|
2551 | 2551 | def __unicode__(self): |
|
2552 | 2552 | return u'<%s => %s >' % (self.user, self.user_group) |
|
2553 | 2553 | |
|
2554 | 2554 | |
|
2555 | 2555 | class UserToPerm(Base, BaseModel): |
|
2556 | 2556 | __tablename__ = 'user_to_perm' |
|
2557 | 2557 | __table_args__ = ( |
|
2558 | 2558 | UniqueConstraint('user_id', 'permission_id'), |
|
2559 | 2559 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2560 | 2560 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2561 | 2561 | ) |
|
2562 | 2562 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2563 | 2563 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2564 | 2564 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2565 | 2565 | |
|
2566 | 2566 | user = relationship('User') |
|
2567 | 2567 | permission = relationship('Permission', lazy='joined') |
|
2568 | 2568 | |
|
2569 | 2569 | def __unicode__(self): |
|
2570 | 2570 | return u'<%s => %s >' % (self.user, self.permission) |
|
2571 | 2571 | |
|
2572 | 2572 | |
|
2573 | 2573 | class UserGroupRepoToPerm(Base, BaseModel): |
|
2574 | 2574 | __tablename__ = 'users_group_repo_to_perm' |
|
2575 | 2575 | __table_args__ = ( |
|
2576 | 2576 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), |
|
2577 | 2577 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2578 | 2578 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2579 | 2579 | ) |
|
2580 | 2580 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2581 | 2581 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2582 | 2582 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2583 | 2583 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
2584 | 2584 | |
|
2585 | 2585 | users_group = relationship('UserGroup') |
|
2586 | 2586 | permission = relationship('Permission') |
|
2587 | 2587 | repository = relationship('Repository') |
|
2588 | 2588 | |
|
2589 | 2589 | @classmethod |
|
2590 | 2590 | def create(cls, users_group, repository, permission): |
|
2591 | 2591 | n = cls() |
|
2592 | 2592 | n.users_group = users_group |
|
2593 | 2593 | n.repository = repository |
|
2594 | 2594 | n.permission = permission |
|
2595 | 2595 | Session().add(n) |
|
2596 | 2596 | return n |
|
2597 | 2597 | |
|
2598 | 2598 | def __unicode__(self): |
|
2599 | 2599 | return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository) |
|
2600 | 2600 | |
|
2601 | 2601 | |
|
2602 | 2602 | class UserGroupUserGroupToPerm(Base, BaseModel): |
|
2603 | 2603 | __tablename__ = 'user_group_user_group_to_perm' |
|
2604 | 2604 | __table_args__ = ( |
|
2605 | 2605 | UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), |
|
2606 | 2606 | CheckConstraint('target_user_group_id != user_group_id'), |
|
2607 | 2607 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2608 | 2608 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2609 | 2609 | ) |
|
2610 | 2610 | user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2611 | 2611 | target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2612 | 2612 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2613 | 2613 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2614 | 2614 | |
|
2615 | 2615 | target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id') |
|
2616 | 2616 | user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id') |
|
2617 | 2617 | permission = relationship('Permission') |
|
2618 | 2618 | |
|
2619 | 2619 | @classmethod |
|
2620 | 2620 | def create(cls, target_user_group, user_group, permission): |
|
2621 | 2621 | n = cls() |
|
2622 | 2622 | n.target_user_group = target_user_group |
|
2623 | 2623 | n.user_group = user_group |
|
2624 | 2624 | n.permission = permission |
|
2625 | 2625 | Session().add(n) |
|
2626 | 2626 | return n |
|
2627 | 2627 | |
|
2628 | 2628 | def __unicode__(self): |
|
2629 | 2629 | return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group) |
|
2630 | 2630 | |
|
2631 | 2631 | |
|
2632 | 2632 | class UserGroupToPerm(Base, BaseModel): |
|
2633 | 2633 | __tablename__ = 'users_group_to_perm' |
|
2634 | 2634 | __table_args__ = ( |
|
2635 | 2635 | UniqueConstraint('users_group_id', 'permission_id',), |
|
2636 | 2636 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2637 | 2637 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2638 | 2638 | ) |
|
2639 | 2639 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2640 | 2640 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2641 | 2641 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2642 | 2642 | |
|
2643 | 2643 | users_group = relationship('UserGroup') |
|
2644 | 2644 | permission = relationship('Permission') |
|
2645 | 2645 | |
|
2646 | 2646 | |
|
2647 | 2647 | class UserRepoGroupToPerm(Base, BaseModel): |
|
2648 | 2648 | __tablename__ = 'user_repo_group_to_perm' |
|
2649 | 2649 | __table_args__ = ( |
|
2650 | 2650 | UniqueConstraint('user_id', 'group_id', 'permission_id'), |
|
2651 | 2651 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2652 | 2652 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2653 | 2653 | ) |
|
2654 | 2654 | |
|
2655 | 2655 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2656 | 2656 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2657 | 2657 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
2658 | 2658 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2659 | 2659 | |
|
2660 | 2660 | user = relationship('User') |
|
2661 | 2661 | group = relationship('RepoGroup') |
|
2662 | 2662 | permission = relationship('Permission') |
|
2663 | 2663 | |
|
2664 | 2664 | @classmethod |
|
2665 | 2665 | def create(cls, user, repository_group, permission): |
|
2666 | 2666 | n = cls() |
|
2667 | 2667 | n.user = user |
|
2668 | 2668 | n.group = repository_group |
|
2669 | 2669 | n.permission = permission |
|
2670 | 2670 | Session().add(n) |
|
2671 | 2671 | return n |
|
2672 | 2672 | |
|
2673 | 2673 | |
|
2674 | 2674 | class UserGroupRepoGroupToPerm(Base, BaseModel): |
|
2675 | 2675 | __tablename__ = 'users_group_repo_group_to_perm' |
|
2676 | 2676 | __table_args__ = ( |
|
2677 | 2677 | UniqueConstraint('users_group_id', 'group_id'), |
|
2678 | 2678 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2679 | 2679 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2680 | 2680 | ) |
|
2681 | 2681 | |
|
2682 | 2682 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2683 | 2683 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2684 | 2684 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
2685 | 2685 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2686 | 2686 | |
|
2687 | 2687 | users_group = relationship('UserGroup') |
|
2688 | 2688 | permission = relationship('Permission') |
|
2689 | 2689 | group = relationship('RepoGroup') |
|
2690 | 2690 | |
|
2691 | 2691 | @classmethod |
|
2692 | 2692 | def create(cls, user_group, repository_group, permission): |
|
2693 | 2693 | n = cls() |
|
2694 | 2694 | n.users_group = user_group |
|
2695 | 2695 | n.group = repository_group |
|
2696 | 2696 | n.permission = permission |
|
2697 | 2697 | Session().add(n) |
|
2698 | 2698 | return n |
|
2699 | 2699 | |
|
2700 | 2700 | def __unicode__(self): |
|
2701 | 2701 | return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group) |
|
2702 | 2702 | |
|
2703 | 2703 | |
|
2704 | 2704 | class Statistics(Base, BaseModel): |
|
2705 | 2705 | __tablename__ = 'statistics' |
|
2706 | 2706 | __table_args__ = ( |
|
2707 | 2707 | UniqueConstraint('repository_id'), |
|
2708 | 2708 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2709 | 2709 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2710 | 2710 | ) |
|
2711 | 2711 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2712 | 2712 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
2713 | 2713 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
|
2714 | 2714 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data

2715 | 2715 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data

2716 | 2716 | languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
|
2717 | 2717 | |
|
2718 | 2718 | repository = relationship('Repository', single_parent=True) |
|
2719 | 2719 | |
|
2720 | 2720 | |
|
2721 | 2721 | class UserFollowing(Base, BaseModel): |
|
2722 | 2722 | __tablename__ = 'user_followings' |
|
2723 | 2723 | __table_args__ = ( |
|
2724 | 2724 | UniqueConstraint('user_id', 'follows_repository_id'), |
|
2725 | 2725 | UniqueConstraint('user_id', 'follows_user_id'), |
|
2726 | 2726 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2727 | 2727 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2728 | 2728 | ) |
|
2729 | 2729 | |
|
2730 | 2730 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2731 | 2731 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2732 | 2732 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) |
|
2733 | 2733 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
2734 | 2734 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
2735 | 2735 | |
|
2736 | 2736 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') |
|
2737 | 2737 | |
|
2738 | 2738 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') |
|
2739 | 2739 | follows_repository = relationship('Repository', order_by='Repository.repo_name') |
|
2740 | 2740 | |
|
2741 | 2741 | @classmethod |
|
2742 | 2742 | def get_repo_followers(cls, repo_id): |
|
2743 | 2743 | return cls.query().filter(cls.follows_repo_id == repo_id) |
|
2744 | 2744 | |
|
2745 | 2745 | |
|
2746 | 2746 | class CacheKey(Base, BaseModel): |
|
2747 | 2747 | __tablename__ = 'cache_invalidation' |
|
2748 | 2748 | __table_args__ = ( |
|
2749 | 2749 | UniqueConstraint('cache_key'), |
|
2750 | 2750 | Index('key_idx', 'cache_key'), |
|
2751 | 2751 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2752 | 2752 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2753 | 2753 | ) |
|
2754 | 2754 | CACHE_TYPE_ATOM = 'ATOM' |
|
2755 | 2755 | CACHE_TYPE_RSS = 'RSS' |
|
2756 | 2756 | CACHE_TYPE_README = 'README' |
|
2757 | 2757 | |
|
2758 | 2758 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2759 | 2759 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) |
|
2760 | 2760 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) |
|
2761 | 2761 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
2762 | 2762 | |
|
2763 | 2763 | def __init__(self, cache_key, cache_args=''): |
|
2764 | 2764 | self.cache_key = cache_key |
|
2765 | 2765 | self.cache_args = cache_args |
|
2766 | 2766 | self.cache_active = False |
|
2767 | 2767 | |
|
2768 | 2768 | def __unicode__(self): |
|
2769 | 2769 | return u"<%s('%s:%s[%s]')>" % ( |
|
2770 | 2770 | self.__class__.__name__, |
|
2771 | 2771 | self.cache_id, self.cache_key, self.cache_active) |
|
2772 | 2772 | |
|
2773 | 2773 | def _cache_key_partition(self): |
|
2774 | 2774 | prefix, repo_name, suffix = self.cache_key.partition(self.cache_args) |
|
2775 | 2775 | return prefix, repo_name, suffix |
|
2776 | 2776 | |
|
2777 | 2777 | def get_prefix(self): |
|
2778 | 2778 | """ |
|
2779 | 2779 | Try to extract the prefix from an existing cache key. The key could consist

2780 | 2780 | of a prefix, repo_name and suffix.
|
2781 | 2781 | """ |
|
2782 | 2782 | # this returns prefix, repo_name, suffix |
|
2783 | 2783 | return self._cache_key_partition()[0] |
|
2784 | 2784 | |
|
2785 | 2785 | def get_suffix(self): |
|
2786 | 2786 | """ |
|
2787 | 2787 | Get the suffix that might have been used in _get_cache_key to
|
2788 | 2788 | generate self.cache_key. Only used for informational purposes |
|
2789 | 2789 | in repo_edit.html. |
|
2790 | 2790 | """ |
|
2791 | 2791 | # prefix, repo_name, suffix |
|
2792 | 2792 | return self._cache_key_partition()[2] |
|
2793 | 2793 | |
|
2794 | 2794 | @classmethod |
|
2795 | 2795 | def delete_all_cache(cls): |
|
2796 | 2796 | """ |
|
2797 | 2797 | Delete all cache keys from database. |
|
2798 | 2798 | Should only be run when all instances are down and all entries

2799 | 2799 | are thus stale.
|
2800 | 2800 | """ |
|
2801 | 2801 | cls.query().delete() |
|
2802 | 2802 | Session().commit() |
|
2803 | 2803 | |
|
2804 | 2804 | @classmethod |
|
2805 | 2805 | def get_cache_key(cls, repo_name, cache_type): |
|
2806 | 2806 | """ |
|
2807 | 2807 | |
|
2808 | 2808 | Generate a cache key for this process of the RhodeCode instance.

2809 | 2809 | The prefix will most likely be the process id or an explicitly set

2810 | 2810 | instance_id from the .ini file.
|
2811 | 2811 | """ |
|
2812 | 2812 | import rhodecode |
|
2813 | 2813 | prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '') |
|
2814 | 2814 | |
|
2815 | 2815 | repo_as_unicode = safe_unicode(repo_name) |
|
2816 | 2816 | key = u'{}_{}'.format(repo_as_unicode, cache_type) \ |
|
2817 | 2817 | if cache_type else repo_as_unicode |
|
2818 | 2818 | |
|
2819 | 2819 | return u'{}{}'.format(prefix, key) |
|
2820 | 2820 | |
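# Illustrative sketch, not part of the original source: the key produced above
# is '<instance_id (possibly empty)><repo_name>_<cache_type>'. The repo name
# and instance_id below are hypothetical.
#
#     # with rhodecode.CONFIG['instance_id'] = 'instance-1':
#     CacheKey.get_cache_key('some/repo', CacheKey.CACHE_TYPE_README)
#     # -> u'instance-1some/repo_README'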
|
2821 | 2821 | @classmethod |
|
2822 | 2822 | def set_invalidate(cls, repo_name, delete=False): |
|
2823 | 2823 | """ |
|
2824 | 2824 | Mark all caches of a repo as invalid in the database. |
|
2825 | 2825 | """ |
|
2826 | 2826 | |
|
2827 | 2827 | try: |
|
2828 | 2828 | qry = Session().query(cls).filter(cls.cache_args == repo_name) |
|
2829 | 2829 | if delete: |
|
2830 | 2830 | log.debug('cache objects deleted for repo %s', |
|
2831 | 2831 | safe_str(repo_name)) |
|
2832 | 2832 | qry.delete() |
|
2833 | 2833 | else: |
|
2834 | 2834 | log.debug('cache objects marked as invalid for repo %s', |
|
2835 | 2835 | safe_str(repo_name)) |
|
2836 | 2836 | qry.update({"cache_active": False}) |
|
2837 | 2837 | |
|
2838 | 2838 | Session().commit() |
|
2839 | 2839 | except Exception: |
|
2840 | 2840 | log.exception( |
|
2841 | 2841 | 'Cache key invalidation failed for repository %s', |
|
2842 | 2842 | safe_str(repo_name)) |
|
2843 | 2843 | Session().rollback() |
|
2844 | 2844 | |
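# Illustrative note with a hypothetical repo name: set_invalidate either marks
# cache rows inactive or removes them outright.
#
#     CacheKey.set_invalidate('some/repo')               # cache_active -> False
#     CacheKey.set_invalidate('some/repo', delete=True)  # rows deleted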
|
2845 | 2845 | @classmethod |
|
2846 | 2846 | def get_active_cache(cls, cache_key): |
|
2847 | 2847 | inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar() |
|
2848 | 2848 | if inv_obj: |
|
2849 | 2849 | return inv_obj |
|
2850 | 2850 | return None |
|
2851 | 2851 | |
|
2852 | 2852 | @classmethod |
|
2853 | 2853 | def repo_context_cache(cls, compute_func, repo_name, cache_type, |
|
2854 | 2854 | thread_scoped=False): |
|
2855 | 2855 | """ |
|
2856 | 2856 | @cache_region('long_term') |
|
2857 | 2857 | def _heavy_calculation(cache_key): |
|
2858 | 2858 | return 'result' |
|
2859 | 2859 | |
|
2860 | 2860 | cache_context = CacheKey.repo_context_cache( |
|
2861 | 2861 | _heavy_calculation, repo_name, cache_type) |
|
2862 | 2862 | |
|
2863 | 2863 | with cache_context as context: |
|
2864 | 2864 | context.invalidate() |
|
2865 | 2865 | computed = context.compute() |
|
2866 | 2866 | |
|
2867 | 2867 | assert computed == 'result' |
|
2868 | 2868 | """ |
|
2869 | 2869 | from rhodecode.lib import caches |
|
2870 | 2870 | return caches.InvalidationContext( |
|
2871 | 2871 | compute_func, repo_name, cache_type, thread_scoped=thread_scoped) |
|
2872 | 2872 | |
|
2873 | 2873 | |
|
2874 | 2874 | class ChangesetComment(Base, BaseModel): |
|
2875 | 2875 | __tablename__ = 'changeset_comments' |
|
2876 | 2876 | __table_args__ = ( |
|
2877 | 2877 | Index('cc_revision_idx', 'revision'), |
|
2878 | 2878 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2879 | 2879 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2880 | 2880 | ) |
|
2881 | 2881 | |
|
2882 | 2882 | COMMENT_OUTDATED = u'comment_outdated' |
|
2883 | 2883 | |
|
2884 | 2884 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) |
|
2885 | 2885 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
2886 | 2886 | revision = Column('revision', String(40), nullable=True) |
|
2887 | 2887 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) |
|
2888 | 2888 | pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True) |
|
2889 | 2889 | line_no = Column('line_no', Unicode(10), nullable=True) |
|
2890 | 2890 | hl_lines = Column('hl_lines', Unicode(512), nullable=True) |
|
2891 | 2891 | f_path = Column('f_path', Unicode(1000), nullable=True) |
|
2892 | 2892 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) |
|
2893 | 2893 | text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) |
|
2894 | 2894 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
2895 | 2895 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
2896 | 2896 | renderer = Column('renderer', Unicode(64), nullable=True) |
|
2897 | 2897 | display_state = Column('display_state', Unicode(128), nullable=True) |
|
2898 | 2898 | |
|
2899 | 2899 | author = relationship('User', lazy='joined') |
|
2900 | 2900 | repo = relationship('Repository') |
|
2901 | 2901 | status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan") |
|
2902 | 2902 | pull_request = relationship('PullRequest', lazy='joined') |
|
2903 | 2903 | pull_request_version = relationship('PullRequestVersion') |
|
2904 | 2904 | |
|
2905 | 2905 | @classmethod |
|
2906 | 2906 | def get_users(cls, revision=None, pull_request_id=None): |
|
2907 | 2907 | """ |
|
2908 | 2908 | Returns users associated with this ChangesetComment, i.e. those

2909 | 2909 | who actually commented
|
2910 | 2910 | |
|
2911 | 2911 | :param cls: |
|
2912 | 2912 | :param revision: |
|
2913 | 2913 | """ |
|
2914 | 2914 | q = Session().query(User)\ |
|
2915 | 2915 | .join(ChangesetComment.author) |
|
2916 | 2916 | if revision: |
|
2917 | 2917 | q = q.filter(cls.revision == revision) |
|
2918 | 2918 | elif pull_request_id: |
|
2919 | 2919 | q = q.filter(cls.pull_request_id == pull_request_id) |
|
2920 | 2920 | return q.all() |
|
2921 | 2921 | |
|
2922 | 2922 | def render(self, mentions=False): |
|
2923 | 2923 | from rhodecode.lib import helpers as h |
|
2924 | 2924 | return h.render(self.text, renderer=self.renderer, mentions=mentions) |
|
2925 | 2925 | |
|
2926 | 2926 | def __repr__(self): |
|
2927 | 2927 | if self.comment_id: |
|
2928 | 2928 | return '<DB:ChangesetComment #%s>' % self.comment_id |
|
2929 | 2929 | else: |
|
2930 | 2930 | return '<DB:ChangesetComment at %#x>' % id(self) |
|
2931 | 2931 | |
|
2932 | 2932 | |
|
2933 | 2933 | class ChangesetStatus(Base, BaseModel): |
|
2934 | 2934 | __tablename__ = 'changeset_statuses' |
|
2935 | 2935 | __table_args__ = ( |
|
2936 | 2936 | Index('cs_revision_idx', 'revision'), |
|
2937 | 2937 | Index('cs_version_idx', 'version'), |
|
2938 | 2938 | UniqueConstraint('repo_id', 'revision', 'version'), |
|
2939 | 2939 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2940 | 2940 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2941 | 2941 | ) |
|
2942 | 2942 | STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed' |
|
2943 | 2943 | STATUS_APPROVED = 'approved' |
|
2944 | 2944 | STATUS_REJECTED = 'rejected' |
|
2945 | 2945 | STATUS_UNDER_REVIEW = 'under_review' |
|
2946 | 2946 | |
|
2947 | 2947 | STATUSES = [ |
|
2948 | 2948 | (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default |
|
2949 | 2949 | (STATUS_APPROVED, _("Approved")), |
|
2950 | 2950 | (STATUS_REJECTED, _("Rejected")), |
|
2951 | 2951 | (STATUS_UNDER_REVIEW, _("Under Review")), |
|
2952 | 2952 | ] |
|
2953 | 2953 | |
|
2954 | 2954 | changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True) |
|
2955 | 2955 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
2956 | 2956 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) |
|
2957 | 2957 | revision = Column('revision', String(40), nullable=False) |
|
2958 | 2958 | status = Column('status', String(128), nullable=False, default=DEFAULT) |
|
2959 | 2959 | changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id')) |
|
2960 | 2960 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) |
|
2961 | 2961 | version = Column('version', Integer(), nullable=False, default=0) |
|
2962 | 2962 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) |
|
2963 | 2963 | |
|
2964 | 2964 | author = relationship('User', lazy='joined') |
|
2965 | 2965 | repo = relationship('Repository') |
|
2966 | 2966 | comment = relationship('ChangesetComment', lazy='joined') |
|
2967 | 2967 | pull_request = relationship('PullRequest', lazy='joined') |
|
2968 | 2968 | |
|
2969 | 2969 | def __unicode__(self): |
|
2970 | 2970 | return u"<%s('%s[%s]:%s')>" % ( |
|
2971 | 2971 | self.__class__.__name__, |
|
2972 | 2972 | self.status, self.version, self.author |
|
2973 | 2973 | ) |
|
2974 | 2974 | |
|
2975 | 2975 | @classmethod |
|
2976 | 2976 | def get_status_lbl(cls, value): |
|
2977 | 2977 | return dict(cls.STATUSES).get(value) |
|
2978 | 2978 | |
|
2979 | 2979 | @property |
|
2980 | 2980 | def status_lbl(self): |
|
2981 | 2981 | return ChangesetStatus.get_status_lbl(self.status) |
|
2982 | 2982 | |
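# Illustrative note, not part of the original source: STATUSES maps status codes
# to translated labels, e.g.
#
#     ChangesetStatus.get_status_lbl('approved')       # -> u'Approved'
#     ChangesetStatus.get_status_lbl('under_review')   # -> u'Under Review'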
|
2983 | 2983 | |
|
2984 | 2984 | class _PullRequestBase(BaseModel): |
|
2985 | 2985 | """ |
|
2986 | 2986 | Common attributes of pull request and version entries. |
|
2987 | 2987 | """ |
|
2988 | 2988 | |
|
2989 | 2989 | # .status values |
|
2990 | 2990 | STATUS_NEW = u'new' |
|
2991 | 2991 | STATUS_OPEN = u'open' |
|
2992 | 2992 | STATUS_CLOSED = u'closed' |
|
2993 | 2993 | |
|
2994 | 2994 | title = Column('title', Unicode(255), nullable=True) |
|
2995 | 2995 | description = Column( |
|
2996 | 2996 | 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), |
|
2997 | 2997 | nullable=True) |
|
2998 | 2998 | # new/open/closed status of pull request (not approve/reject/etc) |
|
2999 | 2999 | status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW) |
|
3000 | 3000 | created_on = Column( |
|
3001 | 3001 | 'created_on', DateTime(timezone=False), nullable=False, |
|
3002 | 3002 | default=datetime.datetime.now) |
|
3003 | 3003 | updated_on = Column( |
|
3004 | 3004 | 'updated_on', DateTime(timezone=False), nullable=False, |
|
3005 | 3005 | default=datetime.datetime.now) |
|
3006 | 3006 | |
|
3007 | 3007 | @declared_attr |
|
3008 | 3008 | def user_id(cls): |
|
3009 | 3009 | return Column( |
|
3010 | 3010 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, |
|
3011 | 3011 | unique=None) |
|
3012 | 3012 | |
|
3013 | 3013 | # 500 revisions max |
|
3014 | 3014 | _revisions = Column( |
|
3015 | 3015 | 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) |
|
3016 | 3016 | |
|
3017 | 3017 | @declared_attr |
|
3018 | 3018 | def source_repo_id(cls): |
|
3019 | 3019 | # TODO: dan: rename column to source_repo_id |
|
3020 | 3020 | return Column( |
|
3021 | 3021 | 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'), |
|
3022 | 3022 | nullable=False) |
|
3023 | 3023 | |
|
3024 | 3024 | source_ref = Column('org_ref', Unicode(255), nullable=False) |
|
3025 | 3025 | |
|
3026 | 3026 | @declared_attr |
|
3027 | 3027 | def target_repo_id(cls): |
|
3028 | 3028 | # TODO: dan: rename column to target_repo_id |
|
3029 | 3029 | return Column( |
|
3030 | 3030 | 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'), |
|
3031 | 3031 | nullable=False) |
|
3032 | 3032 | |
|
3033 | 3033 | target_ref = Column('other_ref', Unicode(255), nullable=False) |
|
3034 | 3034 | |
|
3035 | 3035 | # TODO: dan: rename column to last_merge_source_rev |
|
3036 | 3036 | _last_merge_source_rev = Column( |
|
3037 | 3037 | 'last_merge_org_rev', String(40), nullable=True) |
|
3038 | 3038 | # TODO: dan: rename column to last_merge_target_rev |
|
3039 | 3039 | _last_merge_target_rev = Column( |
|
3040 | 3040 | 'last_merge_other_rev', String(40), nullable=True) |
|
3041 | 3041 | _last_merge_status = Column('merge_status', Integer(), nullable=True) |
|
3042 | 3042 | merge_rev = Column('merge_rev', String(40), nullable=True) |
|
3043 | 3043 | |
|
3044 | 3044 | @hybrid_property |
|
3045 | 3045 | def revisions(self): |
|
3046 | 3046 | return self._revisions.split(':') if self._revisions else [] |
|
3047 | 3047 | |
|
3048 | 3048 | @revisions.setter |
|
3049 | 3049 | def revisions(self, val): |
|
3050 | 3050 | self._revisions = ':'.join(val) |
|
3051 | 3051 | |
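# Illustrative sketch with hypothetical commit hashes: the hybrid property above
# stores the revision list as one ':'-joined string and splits it again on access.
#
#     pull_request.revisions = ['abc123', 'def456']   # stored as u'abc123:def456'
#     pull_request.revisions                          # -> ['abc123', 'def456']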
|
3052 | 3052 | @declared_attr |
|
3053 | 3053 | def author(cls): |
|
3054 | 3054 | return relationship('User', lazy='joined') |
|
3055 | 3055 | |
|
3056 | 3056 | @declared_attr |
|
3057 | 3057 | def source_repo(cls): |
|
3058 | 3058 | return relationship( |
|
3059 | 3059 | 'Repository', |
|
3060 | 3060 | primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__) |
|
3061 | 3061 | |
|
3062 | 3062 | @property |
|
3063 | 3063 | def source_ref_parts(self): |
|
3064 | 3064 | refs = self.source_ref.split(':') |
|
3065 | 3065 | return Reference(refs[0], refs[1], refs[2]) |
|
3066 | 3066 | |
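# Illustrative note with hypothetical values: source_ref/target_ref are stored
# as 'type:name:commit_id' strings, e.g. u'branch:default:1a2b3c4d...', which
# source_ref_parts/target_ref_parts split into a Reference tuple exposing
# .type, .name and .commit_id (as used in get_api_data below).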
|
3067 | 3067 | @declared_attr |
|
3068 | 3068 | def target_repo(cls): |
|
3069 | 3069 | return relationship( |
|
3070 | 3070 | 'Repository', |
|
3071 | 3071 | primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__) |
|
3072 | 3072 | |
|
3073 | 3073 | @property |
|
3074 | 3074 | def target_ref_parts(self): |
|
3075 | 3075 | refs = self.target_ref.split(':') |
|
3076 | 3076 | return Reference(refs[0], refs[1], refs[2]) |
|
3077 | 3077 | |
|
3078 | 3078 | |
|
3079 | 3079 | class PullRequest(Base, _PullRequestBase): |
|
3080 | 3080 | __tablename__ = 'pull_requests' |
|
3081 | 3081 | __table_args__ = ( |
|
3082 | 3082 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3083 | 3083 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
3084 | 3084 | ) |
|
3085 | 3085 | |
|
3086 | 3086 | pull_request_id = Column( |
|
3087 | 3087 | 'pull_request_id', Integer(), nullable=False, primary_key=True) |
|
3088 | 3088 | |
|
3089 | 3089 | def __repr__(self): |
|
3090 | 3090 | if self.pull_request_id: |
|
3091 | 3091 | return '<DB:PullRequest #%s>' % self.pull_request_id |
|
3092 | 3092 | else: |
|
3093 | 3093 | return '<DB:PullRequest at %#x>' % id(self) |
|
3094 | 3094 | |
|
3095 | 3095 | reviewers = relationship('PullRequestReviewers', |
|
3096 | 3096 | cascade="all, delete, delete-orphan") |
|
3097 | 3097 | statuses = relationship('ChangesetStatus') |
|
3098 | 3098 | comments = relationship('ChangesetComment', |
|
3099 | 3099 | cascade="all, delete, delete-orphan") |
|
3100 | 3100 | versions = relationship('PullRequestVersion', |
|
3101 | 3101 | cascade="all, delete, delete-orphan") |
|
3102 | 3102 | |
|
3103 | 3103 | def is_closed(self): |
|
3104 | 3104 | return self.status == self.STATUS_CLOSED |
|
3105 | 3105 | |
|
3106 | 3106 | def get_api_data(self): |
|
3107 | 3107 | from rhodecode.model.pull_request import PullRequestModel |
|
3108 | 3108 | pull_request = self |
|
3109 | 3109 | merge_status = PullRequestModel().merge_status(pull_request) |
|
3110 | 3110 | data = { |
|
3111 | 3111 | 'pull_request_id': pull_request.pull_request_id, |
|
3112 | 3112 | 'url': url('pullrequest_show', repo_name=self.target_repo.repo_name, |
|
3113 | 3113 | pull_request_id=self.pull_request_id, |
|
3114 | 3114 | qualified=True), |
|
3115 | 3115 | 'title': pull_request.title, |
|
3116 | 3116 | 'description': pull_request.description, |
|
3117 | 3117 | 'status': pull_request.status, |
|
3118 | 3118 | 'created_on': pull_request.created_on, |
|
3119 | 3119 | 'updated_on': pull_request.updated_on, |
|
3120 | 3120 | 'commit_ids': pull_request.revisions, |
|
3121 | 3121 | 'review_status': pull_request.calculated_review_status(), |
|
3122 | 3122 | 'mergeable': { |
|
3123 | 3123 | 'status': merge_status[0], |
|
3124 | 3124 | 'message': unicode(merge_status[1]), |
|
3125 | 3125 | }, |
|
3126 | 3126 | 'source': { |
|
3127 | 3127 | 'clone_url': pull_request.source_repo.clone_url(), |
|
3128 | 3128 | 'repository': pull_request.source_repo.repo_name, |
|
3129 | 3129 | 'reference': { |
|
3130 | 3130 | 'name': pull_request.source_ref_parts.name, |
|
3131 | 3131 | 'type': pull_request.source_ref_parts.type, |
|
3132 | 3132 | 'commit_id': pull_request.source_ref_parts.commit_id, |
|
3133 | 3133 | }, |
|
3134 | 3134 | }, |
|
3135 | 3135 | 'target': { |
|
3136 | 3136 | 'clone_url': pull_request.target_repo.clone_url(), |
|
3137 | 3137 | 'repository': pull_request.target_repo.repo_name, |
|
3138 | 3138 | 'reference': { |
|
3139 | 3139 | 'name': pull_request.target_ref_parts.name, |
|
3140 | 3140 | 'type': pull_request.target_ref_parts.type, |
|
3141 | 3141 | 'commit_id': pull_request.target_ref_parts.commit_id, |
|
3142 | 3142 | }, |
|
3143 | 3143 | }, |
|
3144 | 3144 | 'author': pull_request.author.get_api_data(include_secrets=False, |
|
3145 | 3145 | details='basic'), |
|
3146 | 3146 | 'reviewers': [ |
|
3147 | 3147 | { |
|
3148 | 3148 | 'user': reviewer.get_api_data(include_secrets=False, |
|
3149 | 3149 | details='basic'), |
|
3150 | 3150 | 'review_status': st[0][1].status if st else 'not_reviewed', |
|
3151 | 3151 | } |
|
3152 | 3152 | for reviewer, st in pull_request.reviewers_statuses() |
|
3153 | 3153 | ] |
|
3154 | 3154 | } |
|
3155 | 3155 | |
|
3156 | 3156 | return data |
|
3157 | 3157 | |
|
3158 | 3158 | def __json__(self): |
|
3159 | 3159 | return { |
|
3160 | 3160 | 'revisions': self.revisions, |
|
3161 | 3161 | } |
|
3162 | 3162 | |
|
3163 | 3163 | def calculated_review_status(self): |
|
3164 | 3164 | # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html |
|
3165 | 3165 | # because it's tricky on how to use ChangesetStatusModel from there |
|
3166 | 3166 | warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning) |
|
3167 | 3167 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
3168 | 3168 | return ChangesetStatusModel().calculated_review_status(self) |
|
3169 | 3169 | |
|
3170 | 3170 | def reviewers_statuses(self): |
|
3171 | 3171 | warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning) |
|
3172 | 3172 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
3173 | 3173 | return ChangesetStatusModel().reviewers_statuses(self) |
|
3174 | 3174 | |
|
3175 | 3175 | |
|
3176 | 3176 | class PullRequestVersion(Base, _PullRequestBase): |
|
3177 | 3177 | __tablename__ = 'pull_request_versions' |
|
3178 | 3178 | __table_args__ = ( |
|
3179 | 3179 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3180 | 3180 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
3181 | 3181 | ) |
|
3182 | 3182 | |
|
3183 | 3183 | pull_request_version_id = Column( |
|
3184 | 3184 | 'pull_request_version_id', Integer(), nullable=False, primary_key=True) |
|
3185 | 3185 | pull_request_id = Column( |
|
3186 | 3186 | 'pull_request_id', Integer(), |
|
3187 | 3187 | ForeignKey('pull_requests.pull_request_id'), nullable=False) |
|
3188 | 3188 | pull_request = relationship('PullRequest') |
|
3189 | 3189 | |
|
3190 | 3190 | def __repr__(self): |
|
3191 | 3191 | if self.pull_request_version_id: |
|
3192 | 3192 | return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id |
|
3193 | 3193 | else: |
|
3194 | 3194 | return '<DB:PullRequestVersion at %#x>' % id(self) |
|
3195 | 3195 | |
|
3196 | 3196 | |
|
3197 | 3197 | class PullRequestReviewers(Base, BaseModel): |
|
3198 | 3198 | __tablename__ = 'pull_request_reviewers' |
|
3199 | 3199 | __table_args__ = ( |
|
3200 | 3200 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3201 | 3201 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
3202 | 3202 | ) |
|
3203 | 3203 | |
|
3204 | 3204 | def __init__(self, user=None, pull_request=None): |
|
3205 | 3205 | self.user = user |
|
3206 | 3206 | self.pull_request = pull_request |
|
3207 | 3207 | |
|
3208 | 3208 | pull_requests_reviewers_id = Column( |
|
3209 | 3209 | 'pull_requests_reviewers_id', Integer(), nullable=False, |
|
3210 | 3210 | primary_key=True) |
|
3211 | 3211 | pull_request_id = Column( |
|
3212 | 3212 | "pull_request_id", Integer(), |
|
3213 | 3213 | ForeignKey('pull_requests.pull_request_id'), nullable=False) |
|
3214 | 3214 | user_id = Column( |
|
3215 | 3215 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
3216 | 3216 | |
|
3217 | 3217 | user = relationship('User') |
|
3218 | 3218 | pull_request = relationship('PullRequest') |
|
3219 | 3219 | |
|
3220 | 3220 | |
|
3221 | 3221 | class Notification(Base, BaseModel): |
|
3222 | 3222 | __tablename__ = 'notifications' |
|
3223 | 3223 | __table_args__ = ( |
|
3224 | 3224 | Index('notification_type_idx', 'type'), |
|
3225 | 3225 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3226 | 3226 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
3227 | 3227 | ) |
|
3228 | 3228 | |
|
3229 | 3229 | TYPE_CHANGESET_COMMENT = u'cs_comment' |
|
3230 | 3230 | TYPE_MESSAGE = u'message' |
|
3231 | 3231 | TYPE_MENTION = u'mention' |
|
3232 | 3232 | TYPE_REGISTRATION = u'registration' |
|
3233 | 3233 | TYPE_PULL_REQUEST = u'pull_request' |
|
3234 | 3234 | TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' |
|
3235 | 3235 | |
|
3236 | 3236 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) |
|
3237 | 3237 | subject = Column('subject', Unicode(512), nullable=True) |
|
3238 | 3238 | body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) |
|
3239 | 3239 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
3240 | 3240 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
3241 | 3241 | type_ = Column('type', Unicode(255)) |
|
3242 | 3242 | |
|
3243 | 3243 | created_by_user = relationship('User') |
|
3244 | 3244 | notifications_to_users = relationship('UserNotification', lazy='joined', |
|
3245 | 3245 | cascade="all, delete, delete-orphan") |
|
3246 | 3246 | |
|
3247 | 3247 | @property |
|
3248 | 3248 | def recipients(self): |
|
3249 | 3249 | return [x.user for x in UserNotification.query()\ |
|
3250 | 3250 | .filter(UserNotification.notification == self)\ |
|
3251 | 3251 | .order_by(UserNotification.user_id.asc()).all()] |
|
3252 | 3252 | |
|
3253 | 3253 | @classmethod |
|
3254 | 3254 | def create(cls, created_by, subject, body, recipients, type_=None): |
|
3255 | 3255 | if type_ is None: |
|
3256 | 3256 | type_ = Notification.TYPE_MESSAGE |
|
3257 | 3257 | |
|
3258 | 3258 | notification = cls() |
|
3259 | 3259 | notification.created_by_user = created_by |
|
3260 | 3260 | notification.subject = subject |
|
3261 | 3261 | notification.body = body |
|
3262 | 3262 | notification.type_ = type_ |
|
3263 | 3263 | notification.created_on = datetime.datetime.now() |
|
3264 | 3264 | |
|
3265 | 3265 | for u in recipients: |
|
3266 | 3266 | assoc = UserNotification() |
|
3267 | 3267 | assoc.notification = notification |
|
3268 | 3268 | |
|
3269 | 3269 | # if created_by is inside recipients mark his notification |
|
3270 | 3270 | # as read |
|
3271 | 3271 | if u.user_id == created_by.user_id: |
|
3272 | 3272 | assoc.read = True |
|
3273 | 3273 | |
|
3274 | 3274 | u.notifications.append(assoc) |
|
3275 | 3275 | Session().add(notification) |
|
3276 | 3276 | |
|
3277 | 3277 | return notification |
|
3278 | 3278 | |
|
3279 | 3279 | @property |
|
3280 | 3280 | def description(self): |
|
3281 | 3281 | from rhodecode.model.notification import NotificationModel |
|
3282 | 3282 | return NotificationModel().make_description(self) |
|
3283 | 3283 | |
|
3284 | 3284 | |
|
3285 | 3285 | class UserNotification(Base, BaseModel): |
|
3286 | 3286 | __tablename__ = 'user_to_notification' |
|
3287 | 3287 | __table_args__ = ( |
|
3288 | 3288 | UniqueConstraint('user_id', 'notification_id'), |
|
3289 | 3289 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3290 | 3290 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
3291 | 3291 | ) |
|
3292 | 3292 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) |
|
3293 | 3293 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) |
|
3294 | 3294 | read = Column('read', Boolean, default=False) |
|
3295 | 3295 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) |
|
3296 | 3296 | |
|
3297 | 3297 | user = relationship('User', lazy="joined") |
|
3298 | 3298 | notification = relationship('Notification', lazy="joined", |
|
3299 | 3299 | order_by=lambda: Notification.created_on.desc(),) |
|
3300 | 3300 | |
|
3301 | 3301 | def mark_as_read(self): |
|
3302 | 3302 | self.read = True |
|
3303 | 3303 | Session().add(self) |
|
3304 | 3304 | |
|
3305 | 3305 | |
|
3306 | 3306 | class Gist(Base, BaseModel): |
|
3307 | 3307 | __tablename__ = 'gists' |
|
3308 | 3308 | __table_args__ = ( |
|
3309 | 3309 | Index('g_gist_access_id_idx', 'gist_access_id'), |
|
3310 | 3310 | Index('g_created_on_idx', 'created_on'), |
|
3311 | 3311 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3312 | 3312 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
3313 | 3313 | ) |
|
3314 | 3314 | GIST_PUBLIC = u'public' |
|
3315 | 3315 | GIST_PRIVATE = u'private' |
|
3316 | 3316 | DEFAULT_FILENAME = u'gistfile1.txt' |
|
3317 | 3317 | |
|
3318 | 3318 | ACL_LEVEL_PUBLIC = u'acl_public' |
|
3319 | 3319 | ACL_LEVEL_PRIVATE = u'acl_private' |
|
3320 | 3320 | |
|
3321 | 3321 | gist_id = Column('gist_id', Integer(), primary_key=True) |
|
3322 | 3322 | gist_access_id = Column('gist_access_id', Unicode(250)) |
|
3323 | 3323 | gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) |
|
3324 | 3324 | gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True) |
|
3325 | 3325 | gist_expires = Column('gist_expires', Float(53), nullable=False) |
|
3326 | 3326 | gist_type = Column('gist_type', Unicode(128), nullable=False) |
|
3327 | 3327 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
3328 | 3328 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
3329 | 3329 | acl_level = Column('acl_level', Unicode(128), nullable=True) |
|
3330 | 3330 | |
|
3331 | 3331 | owner = relationship('User') |
|
3332 | 3332 | |
|
3333 | 3333 | def __repr__(self): |
|
3334 | 3334 | return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id) |
|
3335 | 3335 | |
|
3336 | 3336 | @classmethod |
|
3337 | 3337 | def get_or_404(cls, id_): |
|
3338 | 3338 | res = cls.query().filter(cls.gist_access_id == id_).scalar() |
|
3339 | 3339 | if not res: |
|
3340 | 3340 | raise HTTPNotFound |
|
3341 | 3341 | return res |
|
3342 | 3342 | |
|
3343 | 3343 | @classmethod |
|
3344 | 3344 | def get_by_access_id(cls, gist_access_id): |
|
3345 | 3345 | return cls.query().filter(cls.gist_access_id == gist_access_id).scalar() |
|
3346 | 3346 | |
|
3347 | 3347 | def gist_url(self): |
|
3348 | 3348 | import rhodecode |
|
3349 | 3349 | alias_url = rhodecode.CONFIG.get('gist_alias_url') |
|
3350 | 3350 | if alias_url: |
|
3351 | 3351 | return alias_url.replace('{gistid}', self.gist_access_id) |
|
3352 | 3352 | |
|
3353 | 3353 | return url('gist', gist_id=self.gist_access_id, qualified=True) |
|
3354 | 3354 | |
|
3355 | 3355 | @classmethod |
|
3356 | 3356 | def base_path(cls): |
|
3357 | 3357 | """ |
|
3358 | 3358 | Returns base path where all gists are stored
|
3359 | 3359 | |
|
3360 | 3360 | :param cls: |
|
3361 | 3361 | """ |
|
3362 | 3362 | from rhodecode.model.gist import GIST_STORE_LOC |
|
3363 | 3363 | q = Session().query(RhodeCodeUi)\ |
|
3364 | 3364 | .filter(RhodeCodeUi.ui_key == URL_SEP) |
|
3365 | 3365 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
3366 | 3366 | return os.path.join(q.one().ui_value, GIST_STORE_LOC) |
|
3367 | 3367 | |
|
3368 | 3368 | def get_api_data(self): |
|
3369 | 3369 | """ |
|
3370 | 3370 | Common function for generating gist related data for API |
|
3371 | 3371 | """ |
|
3372 | 3372 | gist = self |
|
3373 | 3373 | data = { |
|
3374 | 3374 | 'gist_id': gist.gist_id, |
|
3375 | 3375 | 'type': gist.gist_type, |
|
3376 | 3376 | 'access_id': gist.gist_access_id, |
|
3377 | 3377 | 'description': gist.gist_description, |
|
3378 | 3378 | 'url': gist.gist_url(), |
|
3379 | 3379 | 'expires': gist.gist_expires, |
|
3380 | 3380 | 'created_on': gist.created_on, |
|
3381 | 3381 | 'modified_at': gist.modified_at, |
|
3382 | 3382 | 'content': None, |
|
3383 | 3383 | 'acl_level': gist.acl_level, |
|
3384 | 3384 | } |
|
3385 | 3385 | return data |
|
3386 | 3386 | |
|
3387 | 3387 | def __json__(self): |
|
3388 | 3388 | data = dict( |
|
3389 | 3389 | ) |
|
3390 | 3390 | data.update(self.get_api_data()) |
|
3391 | 3391 | return data |
|
3392 | 3392 | # SCM functions |
|
3393 | 3393 | |
|
3394 | 3394 | def scm_instance(self, **kwargs): |
|
3395 | 3395 | full_repo_path = os.path.join(self.base_path(), self.gist_access_id) |
|
3396 | 3396 | return get_vcs_instance( |
|
3397 | 3397 | repo_path=safe_str(full_repo_path), create=False) |
|
3398 | 3398 | |
|
3399 | 3399 | |
|
3400 | 3400 | class DbMigrateVersion(Base, BaseModel): |
|
3401 | 3401 | __tablename__ = 'db_migrate_version' |
|
3402 | 3402 | __table_args__ = ( |
|
3403 | 3403 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3404 | 3404 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
3405 | 3405 | ) |
|
3406 | 3406 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
3407 | 3407 | repository_path = Column('repository_path', Text) |
|
3408 | 3408 | version = Column('version', Integer) |
|
3409 | 3409 | |
|
3410 | 3410 | |
|
3411 | 3411 | class ExternalIdentity(Base, BaseModel): |
|
3412 | 3412 | __tablename__ = 'external_identities' |
|
3413 | 3413 | __table_args__ = ( |
|
3414 | 3414 | Index('local_user_id_idx', 'local_user_id'), |
|
3415 | 3415 | Index('external_id_idx', 'external_id'), |
|
3416 | 3416 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3417 | 3417 | 'mysql_charset': 'utf8'}) |
|
3418 | 3418 | |
|
3419 | 3419 | external_id = Column('external_id', Unicode(255), default=u'', |
|
3420 | 3420 | primary_key=True) |
|
3421 | 3421 | external_username = Column('external_username', Unicode(1024), default=u'') |
|
3422 | 3422 | local_user_id = Column('local_user_id', Integer(), |
|
3423 | 3423 | ForeignKey('users.user_id'), primary_key=True) |
|
3424 | 3424 | provider_name = Column('provider_name', Unicode(255), default=u'', |
|
3425 | 3425 | primary_key=True) |
|
3426 | 3426 | access_token = Column('access_token', String(1024), default=u'') |
|
3427 | 3427 | alt_token = Column('alt_token', String(1024), default=u'') |
|
3428 | 3428 | token_secret = Column('token_secret', String(1024), default=u'') |
|
3429 | 3429 | |
|
3430 | 3430 | @classmethod |
|
3431 | 3431 | def by_external_id_and_provider(cls, external_id, provider_name, |
|
3432 | 3432 | local_user_id=None): |
|
3433 | 3433 | """ |
|
3434 | 3434 | Returns ExternalIdentity instance based on search params |
|
3435 | 3435 | |
|
3436 | 3436 | :param external_id: |
|
3437 | 3437 | :param provider_name: |
|
3438 | 3438 | :return: ExternalIdentity |
|
3439 | 3439 | """ |
|
3440 | 3440 | query = cls.query() |
|
3441 | 3441 | query = query.filter(cls.external_id == external_id) |
|
3442 | 3442 | query = query.filter(cls.provider_name == provider_name) |
|
3443 | 3443 | if local_user_id: |
|
3444 | 3444 | query = query.filter(cls.local_user_id == local_user_id) |
|
3445 | 3445 | return query.first() |
|
3446 | 3446 | |
|
3447 | 3447 | @classmethod |
|
3448 | 3448 | def user_by_external_id_and_provider(cls, external_id, provider_name): |
|
3449 | 3449 | """ |
|
3450 | 3450 | Returns User instance based on search params |
|
3451 | 3451 | |
|
3452 | 3452 | :param external_id: |
|
3453 | 3453 | :param provider_name: |
|
3454 | 3454 | :return: User |
|
3455 | 3455 | """ |
|
3456 | 3456 | query = User.query() |
|
3457 | 3457 | query = query.filter(cls.external_id == external_id) |
|
3458 | 3458 | query = query.filter(cls.provider_name == provider_name) |
|
3459 | 3459 | query = query.filter(User.user_id == cls.local_user_id) |
|
3460 | 3460 | return query.first() |
|
3461 | 3461 | |
|
3462 | 3462 | @classmethod |
|
3463 | 3463 | def by_local_user_id(cls, local_user_id): |
|
3464 | 3464 | """ |
|
3465 | 3465 | Returns all tokens for user |
|
3466 | 3466 | |
|
3467 | 3467 | :param local_user_id: |
|
3468 | 3468 | :return: ExternalIdentity |
|
3469 | 3469 | """ |
|
3470 | 3470 | query = cls.query() |
|
3471 | 3471 | query = query.filter(cls.local_user_id == local_user_id) |
|
3472 | 3472 | return query |
|
3473 | 3473 | |
|
3474 | 3474 | |
|
3475 | 3475 | class Integration(Base, BaseModel): |
|
3476 | 3476 | __tablename__ = 'integrations' |
|
3477 | 3477 | __table_args__ = ( |
|
3478 | 3478 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
3479 | 3479 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
3480 | 3480 | ) |
|
3481 | 3481 | |
|
3482 | 3482 | integration_id = Column('integration_id', Integer(), primary_key=True) |
|
3483 | 3483 | integration_type = Column('integration_type', String(255)) |
|
3484 | 3484 | enabled = Column('enabled', Boolean(), nullable=False) |
|
3485 | 3485 | name = Column('name', String(255), nullable=False) |
|
3486 | 3486 | child_repos_only = Column('child_repos_only', Boolean(), nullable=False, |
|
3487 | 3487 | default=False) |
|
3488 | 3488 | |
|
3489 | 3489 | settings = Column( |
|
3490 | 3490 | 'settings_json', MutationObj.as_mutable( |
|
3491 | 3491 | JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) |
|
3492 | 3492 | repo_id = Column( |
|
3493 | 3493 | 'repo_id', Integer(), ForeignKey('repositories.repo_id'), |
|
3494 | 3494 | nullable=True, unique=None, default=None) |
|
3495 | 3495 | repo = relationship('Repository', lazy='joined') |
|
3496 | 3496 | |
|
3497 | 3497 | repo_group_id = Column( |
|
3498 | 3498 | 'repo_group_id', Integer(), ForeignKey('groups.group_id'), |
|
3499 | 3499 | nullable=True, unique=None, default=None) |
|
3500 | 3500 | repo_group = relationship('RepoGroup', lazy='joined') |
|
3501 | 3501 | |
|
3502 | @ | |

3502 | @property | |
|
3503 | 3503 | def scope(self): |
|
3504 | 3504 | if self.repo: |
|
3505 | return self.repo | |
|
3505 | return repr(self.repo) | |
|
3506 | 3506 | if self.repo_group: |
|
3507 |
|
|
|
3507 | if self.child_repos_only: | |
|
3508 | return repr(self.repo_group) + ' (child repos only)' | |
|
3509 | else: | |
|
3510 | return repr(self.repo_group) + ' (recursive)' | |
|
3508 | 3511 | if self.child_repos_only: |
|
3509 | 3512 | return 'root_repos' |
|
3510 | 3513 | return 'global' |
|
3511 | 3514 | |
|
3512 | @scope.setter | |
|
3513 | def scope(self, value): | |
|
3514 | self.repo = None | |
|
3515 | self.repo_id = None | |
|
3516 | self.repo_group_id = None | |
|
3517 | self.repo_group = None | |
|
3518 | self.child_repos_only = False | |
|
3519 | if isinstance(value, Repository): | |
|
3520 | self.repo_id = value.repo_id | |
|
3521 | self.repo = value | |
|
3522 | elif isinstance(value, RepoGroup): | |
|
3523 | self.repo_group_id = value.group_id | |
|
3524 | self.repo_group = value | |
|
3525 | elif value == 'root_repos': | |
|
3526 | self.child_repos_only = True | |
|
3527 | elif value == 'global': | |
|
3528 | pass | |
|
3529 | else: | |
|
3530 | raise Exception("invalid scope: %s, must be one of " | |
|
3531 | "['global', 'root_repos', <RepoGroup>. <Repository>]" % value) | |
|
3532 | ||
|
3533 | 3515 | def __repr__(self): |
|
3534 | 3516 | return '<Integration(%r, %r)>' % (self.integration_type, self.scope) |
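
Editor's note: the hunk above replaces the writable scope setter with a read-only property. As a minimal sketch (not part of this changeset), the hypothetical helper below restates the new property body as a standalone function so the four result shapes are easy to compare; plain strings stand in for Repository/RepoGroup instances.

# Hypothetical restatement of the rewritten Integration.scope property above.
def describe_scope(repo, repo_group, child_repos_only):
    if repo:
        return repr(repo)
    if repo_group:
        if child_repos_only:
            return repr(repo_group) + ' (child repos only)'
        else:
            return repr(repo_group) + ' (recursive)'
    if child_repos_only:
        return 'root_repos'
    return 'global'

# Plain-string stand-ins for Repository / RepoGroup objects:
assert describe_scope(None, None, False) == 'global'
assert describe_scope(None, None, True) == 'root_repos'
assert describe_scope(None, 'some-group', True).endswith('(child repos only)')

Writes to integration.scope are no longer possible; callers now set repo, repo_group and child_repos_only explicitly, as the model changes below show.
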
@@ -1,213 +1,222 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Model for integrations |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import logging |
|
28 | 28 | import traceback |
|
29 | 29 | |
|
30 | 30 | from pylons import tmpl_context as c |
|
31 | 31 | from pylons.i18n.translation import _, ungettext |
|
32 | 32 | from sqlalchemy import or_, and_ |
|
33 | 33 | from sqlalchemy.sql.expression import false, true |
|
34 | 34 | from mako import exceptions |
|
35 | 35 | |
|
36 | 36 | import rhodecode |
|
37 | 37 | from rhodecode import events |
|
38 | 38 | from rhodecode.lib import helpers as h |
|
39 | 39 | from rhodecode.lib.caching_query import FromCache |
|
40 | 40 | from rhodecode.lib.utils import PartialRenderer |
|
41 | 41 | from rhodecode.model import BaseModel |
|
42 | 42 | from rhodecode.model.db import Integration, User, Repository, RepoGroup |
|
43 | 43 | from rhodecode.model.meta import Session |
|
44 | 44 | from rhodecode.integrations import integration_type_registry |
|
45 | 45 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
46 | 46 | |
|
47 | 47 | log = logging.getLogger(__name__) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class IntegrationModel(BaseModel): |
|
51 | 51 | |
|
52 | 52 | cls = Integration |
|
53 | 53 | |
|
54 | 54 | def __get_integration(self, integration): |
|
55 | 55 | if isinstance(integration, Integration): |
|
56 | 56 | return integration |
|
57 | 57 | elif isinstance(integration, (int, long)): |
|
58 | 58 | return self.sa.query(Integration).get(integration) |
|
59 | 59 | else: |
|
60 | 60 | if integration: |
|
61 | 61 | raise Exception('integration must be int, long or Instance' |
|
62 | 62 | ' of Integration got %s' % type(integration)) |
|
63 | 63 | |
|
64 | def create(self, IntegrationType, name, enabled, | |

64 | def create(self, IntegrationType, name, enabled, repo, repo_group, | |
|
65 | child_repos_only, settings): | |
|
65 | 66 | """ Create an IntegrationType integration """ |
|
66 | 67 | integration = Integration() |
|
67 | 68 | integration.integration_type = IntegrationType.key |
|
68 | 69 | self.sa.add(integration) |
|
69 | self.update_integration(integration, name, enabled, | |

70 | self.update_integration(integration, name, enabled, repo, repo_group, | |
|
71 | child_repos_only, settings) | |
|
70 | 72 | self.sa.commit() |
|
71 | 73 | return integration |
|
72 | 74 | |
|
73 | def update_integration(self, integration, name, enabled, | |

74 | """ | |
|
75 | :param scope: one of ['global', 'root_repos', <RepoGroup>. <Repository>] | |
|
76 | """ | |
|
77 | ||
|
75 | def update_integration(self, integration, name, enabled, repo, repo_group, | |
|
76 | child_repos_only, settings): | |
|
78 | 77 | integration = self.__get_integration(integration) |
|
79 | 78 | |
|
80 | integration. | |

79 | integration.repo = repo | |
|
80 | integration.repo_group = repo_group | |
|
81 | integration.child_repos_only = child_repos_only | |
|
81 | 82 | integration.name = name |
|
82 | 83 | integration.enabled = enabled |
|
83 | 84 | integration.settings = settings |
|
84 | 85 | |
|
85 | 86 | return integration |
|
86 | 87 | |
|
87 | 88 | def delete(self, integration): |
|
88 | 89 | integration = self.__get_integration(integration) |
|
89 | 90 | if integration: |
|
90 | 91 | self.sa.delete(integration) |
|
91 | 92 | return True |
|
92 | 93 | return False |
|
93 | 94 | |
|
94 | 95 | def get_integration_handler(self, integration): |
|
95 | 96 | TypeClass = integration_type_registry.get(integration.integration_type) |
|
96 | 97 | if not TypeClass: |
|
97 | 98 | log.error('No class could be found for integration type: {}'.format( |
|
98 | 99 | integration.integration_type)) |
|
99 | 100 | return None |
|
100 | 101 | |
|
101 | 102 | return TypeClass(integration.settings) |
|
102 | 103 | |
|
103 | 104 | def send_event(self, integration, event): |
|
104 | 105 | """ Send an event to an integration """ |
|
105 | 106 | handler = self.get_integration_handler(integration) |
|
106 | 107 | if handler: |
|
107 | 108 | handler.send_event(event) |
|
108 | 109 | |
|
109 | 110 | def get_integrations(self, scope, IntegrationType=None): |
|
110 | 111 | """ |
|
111 | 112 | Return integrations for a scope, which must be one of: |
|
112 | 113 | |
|
113 | 114 | 'all' - every integration, global/repogroup/repo |
|
114 | 115 | 'global' - global integrations only |
|
115 | 116 | <Repository> instance - integrations for this repo only |
|
116 | 117 | <RepoGroup> instance - integrations for this repogroup only |
|
117 | 118 | """ |
|
118 | 119 | |
|
119 | 120 | if isinstance(scope, Repository): |
|
120 | 121 | query = self.sa.query(Integration).filter( |
|
121 | 122 | Integration.repo==scope) |
|
122 | 123 | elif isinstance(scope, RepoGroup): |
|
123 | 124 | query = self.sa.query(Integration).filter( |
|
124 | 125 | Integration.repo_group==scope) |
|
125 | 126 | elif scope == 'global': |
|
126 | 127 | # global integrations |
|
127 | 128 | query = self.sa.query(Integration).filter( |
|
128 | 129 | and_(Integration.repo_id==None, Integration.repo_group_id==None) |
|
129 | 130 | ) |
|
130 | elif scope == 'root_repos': | |

131 | elif scope == 'root-repos': | |
|
131 | 132 | query = self.sa.query(Integration).filter( |
|
132 | 133 | and_(Integration.repo_id==None, |
|
133 | 134 | Integration.repo_group_id==None, |
|
134 | 135 | Integration.child_repos_only==True) |
|
135 | 136 | ) |
|
136 | 137 | elif scope == 'all': |
|
137 | 138 | query = self.sa.query(Integration) |
|
138 | 139 | else: |
|
139 | 140 | raise Exception( |
|
140 | 141 | "invalid `scope`, must be one of: " |
|
141 | 142 | "['global', 'all', <Repository>, <RepoGroup>]") |
|
142 | 143 | |
|
143 | 144 | if IntegrationType is not None: |
|
144 | 145 | query = query.filter( |
|
145 | 146 | Integration.integration_type==IntegrationType.key) |
|
146 | 147 | |
|
147 | 148 | result = [] |
|
148 | 149 | for integration in query.all(): |
|
149 | 150 | IntType = integration_type_registry.get(integration.integration_type) |
|
150 | 151 | result.append((IntType, integration)) |
|
151 | 152 | return result |
|
152 | 153 | |
|
153 | 154 | def get_for_event(self, event, cache=False): |
|
154 | 155 | """ |
|
155 | 156 | Get integrations that match an event |
|
156 | 157 | """ |
|
157 | 158 | query = self.sa.query( |
|
158 | 159 | Integration |
|
159 | 160 | ).filter( |
|
160 | 161 | Integration.enabled==True |
|
161 | 162 | ) |
|
162 | 163 | |
|
163 | 164 | global_integrations_filter = and_( |
|
164 | 165 | Integration.repo_id==None, |
|
165 | 166 | Integration.repo_group_id==None, |
|
166 | 167 | Integration.child_repos_only==False, |
|
167 | 168 | ) |
|
168 | 169 | |
|
169 | 170 | if isinstance(event, events.RepoEvent): |
|
170 | 171 | root_repos_integrations_filter = and_( |
|
171 | 172 | Integration.repo_id==None, |
|
172 | 173 | Integration.repo_group_id==None, |
|
173 | 174 | Integration.child_repos_only==True, |
|
174 | 175 | ) |
|
175 | 176 | |
|
176 | 177 | clauses = [ |
|
177 | 178 | global_integrations_filter, |
|
178 | 179 | ] |
|
179 | 180 | |
|
180 | 181 | # repo integrations |
|
181 | 182 | if event.repo.repo_id: # pre-create events don't have a repo_id yet
|
182 | 183 | clauses.append( |
|
183 | 184 | Integration.repo_id==event.repo.repo_id |
|
184 | 185 | ) |
|
185 | 186 | |
|
186 | 187 | if event.repo.group: |
|
187 | 188 | clauses.append( |
|
188 | Integration.repo_group_id == event.repo.group.group_id | |
|
189 | and_( | |
|
190 | Integration.repo_group_id==event.repo.group.group_id, | |
|
191 | Integration.child_repos_only==True | |
|
192 | ) | |
|
189 | 193 | ) |
|
190 | # repo group cascade to kids | |

191 | | |

192 | # [group.group_id for group in | |

193 | | |

194 | # )) | |

194 | # repo group cascade to kids | |
|
195 | clauses.append( | |
|
196 | and_( | |
|
197 | Integration.repo_group_id.in_( | |
|
198 | [group.group_id for group in | |
|
199 | event.repo.groups_with_parents] | |
|
200 | ), | |
|
201 | Integration.child_repos_only==False | |
|
202 | ) | |
|
203 | ) | |
|
195 | 204 | |
|
196 | 205 | |
|
197 | 206 | if not event.repo.group: # root repo |
|
198 | 207 | clauses.append(root_repos_integrations_filter) |
|
199 | 208 | |
|
200 | 209 | query = query.filter(or_(*clauses)) |
|
201 | 210 | |
|
202 | 211 | if cache: |
|
203 | 212 | query = query.options(FromCache( |
|
204 | 213 | "sql_cache_short", |
|
205 | 214 | "get_enabled_repo_integrations_%i" % event.repo.repo_id)) |
|
206 | 215 | else: # only global integrations |
|
207 | 216 | query = query.filter(global_integrations_filter) |
|
208 | 217 | if cache: |
|
209 | 218 | query = query.options(FromCache( |
|
210 | 219 | "sql_cache_short", "get_enabled_global_integrations")) |
|
211 | 220 | |
|
212 | 221 | result = query.all() |
|
213 | 222 | return result No newline at end of file |
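
Editor's note: with this change an integration's reach is decided by the explicit repo / repo_group / child_repos_only columns, and get_for_event() above combines four OR-ed clauses for repository events. The sketch below (not part of the changeset) restates that matching logic in plain Python; the dict keys and the helper name are hypothetical stand-ins for the SQLAlchemy columns.

# Hypothetical plain-Python restatement of the clause set built in get_for_event().
def integration_matches_repo_event(integration, repo_id, group_id, ancestor_group_ids):
    """integration: dict with 'repo_id', 'repo_group_id', 'child_repos_only'.
    group_id: the repository's direct group id (None for a root repo).
    ancestor_group_ids: that group id plus the ids of all of its parent groups."""
    # global integrations cascade everywhere
    if (integration['repo_id'] is None and integration['repo_group_id'] is None
            and not integration['child_repos_only']):
        return True
    # bound directly to this repository (pre-create events have no repo_id yet)
    if repo_id is not None and integration['repo_id'] == repo_id:
        return True
    if group_id is not None:
        # bound to the immediate group with "child repos only"
        if integration['repo_group_id'] == group_id and integration['child_repos_only']:
            return True
        # bound to any enclosing group and cascading recursively
        if (integration['repo_group_id'] in ancestor_group_ids
                and not integration['child_repos_only']):
            return True
    else:
        # "top level repositories only" integrations match ungrouped repos
        if (integration['repo_id'] is None and integration['repo_group_id'] is None
                and integration['child_repos_only']):
            return True
    return False
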
@@ -1,187 +1,226 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | |
|
23 | 23 | import deform |
|
24 | 24 | import colander |
|
25 | 25 | |
|
26 | 26 | from rhodecode.translation import _ |
|
27 | 27 | from rhodecode.model.db import Repository, RepoGroup |
|
28 | 28 | from rhodecode.model.validation_schema import validators, preparers |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def integration_scope_choices(permissions): |
|
32 | 32 | """ |
|
33 | 33 | Return list of (value, label) choices for integration scopes depending on |
|
34 | 34 | the permissions |
|
35 | 35 | """ |
|
36 | 36 | result = [('', _('Pick a scope:'))] |
|
37 | 37 | if 'hg.admin' in permissions['global']: |
|
38 | 38 | result.extend([ |
|
39 | 39 | ('global', _('Global (all repositories)')), |
|
40 | ('root_repos', _('Top level repositories only')), | |

40 | ('root-repos', _('Top level repositories only')), | |
|
41 | 41 | ]) |
|
42 | 42 | |
|
43 | 43 | repo_choices = [ |
|
44 | 44 | ('repo:%s' % repo_name, '/' + repo_name) |
|
45 | 45 | for repo_name, repo_perm |
|
46 | 46 | in permissions['repositories'].items() |
|
47 | 47 | if repo_perm == 'repository.admin' |
|
48 | 48 | ] |
|
49 | 49 | repogroup_choices = [ |
|
50 | ('repogroup:%s' % repo_group_name, '/' + repo_group_name + ' | |

50 | ('repogroup:%s' % repo_group_name, '/' + repo_group_name + '/ (child repos only)') | |
|
51 | for repo_group_name, repo_group_perm | |
|
52 | in permissions['repositories_groups'].items() | |
|
53 | if repo_group_perm == 'group.admin' | |
|
54 | ] | |
|
55 | repogroup_recursive_choices = [ | |
|
56 | ('repogroup-recursive:%s' % repo_group_name, '/' + repo_group_name + '/ (recursive)') | |
|
51 | 57 | for repo_group_name, repo_group_perm |
|
52 | 58 | in permissions['repositories_groups'].items() |
|
53 | 59 | if repo_group_perm == 'group.admin' |
|
54 | 60 | ] |
|
55 | 61 | result.extend( |
|
56 | sorted(repogroup_choices + repo_choices, | |
|
62 | sorted(repogroup_recursive_choices + repogroup_choices + repo_choices, | |
|
57 | 63 | key=lambda (choice, label): choice.split(':', 1)[1] |
|
58 | 64 | ) |
|
59 | 65 | ) |
|
60 | 66 | return result |
|
61 | 67 | |
|
62 | 68 | |
|
63 | 69 | @colander.deferred |
|
64 | 70 | def deferred_integration_scopes_validator(node, kw): |
|
65 | 71 | perms = kw.get('permissions') |
|
66 | 72 | def _scope_validator(_node, scope): |
|
67 | 73 | is_super_admin = 'hg.admin' in perms['global'] |
|
68 | 74 | |
|
69 | if scope == 'global': | |

75 | if scope.get('repo'): | |
|
76 | if (is_super_admin or perms['repositories'].get( | |
|
77 | scope['repo'].repo_name) == 'repository.admin'): | |
|
78 | return True | |
|
79 | msg = _('Only repo admins can create integrations') | |
|
80 | raise colander.Invalid(_node, msg) | |
|
81 | elif scope.get('repo_group'): | |
|
82 | if (is_super_admin or perms['repositories_groups'].get( | |
|
83 | scope['repo_group'].group_name) == 'group.admin'): | |
|
84 | return True | |
|
85 | ||
|
86 | msg = _('Only repogroup admins can create integrations') | |
|
87 | raise colander.Invalid(_node, msg) | |
|
88 | else: | |
|
70 | 89 | if is_super_admin: |
|
71 | 90 | return True |
|
72 | 91 | msg = _('Only superadmins can create global integrations') |
|
73 | 92 | raise colander.Invalid(_node, msg) |
|
74 | elif isinstance(scope, Repository): | |
|
75 | if (is_super_admin or perms['repositories'].get( | |
|
76 | scope.repo_name) == 'repository.admin'): | |
|
77 | return True | |
|
78 | msg = _('Only repo admins can create integrations') | |
|
79 | raise colander.Invalid(_node, msg) | |
|
80 | elif isinstance(scope, RepoGroup): | |
|
81 | if (is_super_admin or perms['repositories_groups'].get( | |
|
82 | scope.group_name) == 'group.admin'): | |
|
83 | return True | |
|
84 | ||
|
85 | msg = _('Only repogroup admins can create integrations') | |
|
86 | raise colander.Invalid(_node, msg) | |
|
87 | ||
|
88 | msg = _('Invalid integration scope: %s' % scope) | |
|
89 | raise colander.Invalid(node, msg) | |
|
90 | 93 | |
|
91 | 94 | return _scope_validator |
|
92 | 95 | |
|
93 | 96 | |
|
94 | 97 | @colander.deferred |
|
95 | 98 | def deferred_integration_scopes_widget(node, kw): |
|
96 | 99 | if kw.get('no_scope'): |
|
97 | 100 | return deform.widget.TextInputWidget(readonly=True) |
|
98 | 101 | |
|
99 | 102 | choices = integration_scope_choices(kw.get('permissions')) |
|
100 | 103 | widget = deform.widget.Select2Widget(values=choices) |
|
101 | 104 | return widget |
|
102 | 105 | |
|
103 | class IntegrationScope(colander.SchemaType): | |
|
106 | ||
|
107 | class IntegrationScopeType(colander.SchemaType): | |
|
104 | 108 | def serialize(self, node, appstruct): |
|
105 | 109 | if appstruct is colander.null: |
|
106 | 110 | return colander.null |
|
107 | 111 | |
|
108 | if | |

109 | return 'repo:%s' % appstruct.repo_name | |

110 | elif | |

111 | return 'repogroup:%s' % appstruct.group_name | |

112 | elif appstruct in ('global', 'root_repos'): | |

113 | | |

112 | if appstruct.get('repo'): | |
|
113 | return 'repo:%s' % appstruct['repo'].repo_name | |
|
114 | elif appstruct.get('repo_group'): | |
|
115 | if appstruct.get('child_repos_only'): | |
|
116 | return 'repogroup:%s' % appstruct['repo_group'].group_name | |
|
117 | else: | |
|
118 | return 'repogroup-recursive:%s' % ( | |
|
119 | appstruct['repo_group'].group_name) | |
|
120 | else: | |
|
121 | if appstruct.get('child_repos_only'): | |
|
122 | return 'root-repos' | |
|
123 | else: | |
|
124 | return 'global' | |
|
125 | ||
|
114 | 126 | raise colander.Invalid(node, '%r is not a valid scope' % appstruct) |
|
115 | 127 | |
|
116 | 128 | def deserialize(self, node, cstruct): |
|
117 | 129 | if cstruct is colander.null: |
|
118 | 130 | return colander.null |
|
119 | 131 | |
|
120 | 132 | if cstruct.startswith('repo:'): |
|
121 | 133 | repo = Repository.get_by_repo_name(cstruct.split(':')[1]) |
|
122 | 134 | if repo: |
|
123 | return repo | |

135 | return { | |
|
136 | 'repo': repo, | |
|
137 | 'repo_group': None, | |
|
138 | 'child_repos_only': None, | |
|
139 | } | |
|
140 | elif cstruct.startswith('repogroup-recursive:'): | |
|
141 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) | |
|
142 | if repo_group: | |
|
143 | return { | |
|
144 | 'repo': None, | |
|
145 | 'repo_group': repo_group, | |
|
146 | 'child_repos_only': False | |
|
147 | } | |
|
124 | 148 | elif cstruct.startswith('repogroup:'): |
|
125 | 149 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) |
|
126 | 150 | if repo_group: |
|
127 | return repo_group | |

128 | elif cstruct in ('global', 'root_repos'): | |
|
129 | return cstruct | |
|
151 | return { | |
|
152 | 'repo': None, | |
|
153 | 'repo_group': repo_group, | |
|
154 | 'child_repos_only': True | |
|
155 | } | |
|
156 | elif cstruct == 'global': | |
|
157 | return { | |
|
158 | 'repo': None, | |
|
159 | 'repo_group': None, | |
|
160 | 'child_repos_only': False | |
|
161 | } | |
|
162 | elif cstruct == 'root-repos': | |
|
163 | return { | |
|
164 | 'repo': None, | |
|
165 | 'repo_group': None, | |
|
166 | 'child_repos_only': True | |
|
167 | } | |
|
130 | 168 | |
|
131 | 169 | raise colander.Invalid(node, '%r is not a valid scope' % cstruct) |
|
132 | 170 | |
|
171 | ||
|
133 | 172 | class IntegrationOptionsSchemaBase(colander.MappingSchema): |
|
134 | 173 | |
|
135 | 174 | name = colander.SchemaNode( |
|
136 | 175 | colander.String(), |
|
137 | 176 | description=_('Short name for this integration.'), |
|
138 | 177 | missing=colander.required, |
|
139 | 178 | title=_('Integration name'), |
|
140 | 179 | ) |
|
141 | 180 | |
|
142 | 181 | scope = colander.SchemaNode( |
|
143 | IntegrationScope(), | |
|
182 | IntegrationScopeType(), | |
|
144 | 183 | description=_( |
|
145 | 'Scope of the integration. | |

146 | ' runs on all | |

184 | 'Scope of the integration. Recursive means the integration ' | |
|
185 | ' runs on all repos of that group and children recursively.'), | |
|
147 | 186 | title=_('Integration scope'), |
|
148 | 187 | validator=deferred_integration_scopes_validator, |
|
149 | 188 | widget=deferred_integration_scopes_widget, |
|
150 | 189 | missing=colander.required, |
|
151 | 190 | ) |
|
152 | 191 | |
|
153 | 192 | enabled = colander.SchemaNode( |
|
154 | 193 | colander.Bool(), |
|
155 | 194 | default=True, |
|
156 | 195 | description=_('Enable or disable this integration.'), |
|
157 | 196 | missing=False, |
|
158 | 197 | title=_('Enabled'), |
|
159 | 198 | ) |
|
160 | 199 | |
|
161 | 200 | |
|
162 | 201 | |
|
163 | 202 | def make_integration_schema(IntegrationType, settings=None): |
|
164 | 203 | """ |
|
165 | 204 | Return a colander schema for an integration type |
|
166 | 205 | |
|
167 | 206 | :param IntegrationType: the integration type class |
|
168 | 207 | :param settings: existing integration settings dict (optional) |
|
169 | 208 | """ |
|
170 | 209 | |
|
171 | 210 | settings = settings or {} |
|
172 | 211 | settings_schema = IntegrationType(settings=settings).settings_schema() |
|
173 | 212 | |
|
174 | 213 | class IntegrationSchema(colander.Schema): |
|
175 | 214 | options = IntegrationOptionsSchemaBase() |
|
176 | 215 | |
|
177 | 216 | schema = IntegrationSchema() |
|
178 | 217 | schema['options'].title = _('General integration options') |
|
179 | 218 | |
|
180 | 219 | settings_schema.name = 'settings' |
|
181 | 220 | settings_schema.title = _('{integration_type} settings').format( |
|
182 | 221 | integration_type=IntegrationType.display_name) |
|
183 | 222 | schema.add(settings_schema) |
|
184 | 223 | |
|
185 | 224 | return schema |
|
186 | 225 | |
|
187 | 226 |
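
Editor's note: the schema now exchanges flat scope strings with the form and a dict appstruct for the model layer. The sketch below (not part of the changeset) lists the string-to-dict mapping that IntegrationScopeType implements above; the repository and group names are placeholders, and plain strings stand in for the Repository/RepoGroup objects the real deserializer looks up.

# Hypothetical summary of the IntegrationScopeType cstruct <-> appstruct mapping.
scope_examples = {
    'global': {'repo': None, 'repo_group': None, 'child_repos_only': False},
    'root-repos': {'repo': None, 'repo_group': None, 'child_repos_only': True},
    'repo:some/repo': {'repo': '<Repository some/repo>', 'repo_group': None,
                       'child_repos_only': None},
    'repogroup:some-group': {'repo': None, 'repo_group': '<RepoGroup some-group>',
                             'child_repos_only': True},
    'repogroup-recursive:some-group': {'repo': None,
                                       'repo_group': '<RepoGroup some-group>',
                                       'child_repos_only': False},
}

for cstruct, appstruct in sorted(scope_examples.items()):
    print('%-34s -> %r' % (cstruct, appstruct))
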
@@ -1,249 +1,252 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%inherit file="base.html"/> |
|
3 | 3 | |
|
4 | 4 | <%def name="breadcrumbs_links()"> |
|
5 | 5 | %if c.repo: |
|
6 | 6 | ${h.link_to('Settings',h.url('edit_repo', repo_name=c.repo.repo_name))} |
|
7 | 7 | %elif c.repo_group: |
|
8 | 8 | ${h.link_to(_('Admin'),h.url('admin_home'))} |
|
9 | 9 | » |
|
10 | 10 | ${h.link_to(_('Repository Groups'),h.url('repo_groups'))} |
|
11 | 11 | » |
|
12 | 12 | ${h.link_to(c.repo_group.group_name,h.url('edit_repo_group', group_name=c.repo_group.group_name))} |
|
13 | 13 | %else: |
|
14 | 14 | ${h.link_to(_('Admin'),h.url('admin_home'))} |
|
15 | 15 | » |
|
16 | 16 | ${h.link_to(_('Settings'),h.url('admin_settings'))} |
|
17 | 17 | %endif |
|
18 | 18 | %if current_IntegrationType: |
|
19 | 19 | » |
|
20 | 20 | %if c.repo: |
|
21 | 21 | ${h.link_to(_('Integrations'), |
|
22 | 22 | request.route_url(route_name='repo_integrations_home', |
|
23 | 23 | repo_name=c.repo.repo_name))} |
|
24 | 24 | %elif c.repo_group: |
|
25 | 25 | ${h.link_to(_('Integrations'), |
|
26 | 26 | request.route_url(route_name='repo_group_integrations_home', |
|
27 | 27 | repo_group_name=c.repo_group.group_name))} |
|
28 | 28 | %else: |
|
29 | 29 | ${h.link_to(_('Integrations'), |
|
30 | 30 | request.route_url(route_name='global_integrations_home'))} |
|
31 | 31 | %endif |
|
32 | 32 | » |
|
33 | 33 | ${current_IntegrationType.display_name} |
|
34 | 34 | %else: |
|
35 | 35 | » |
|
36 | 36 | ${_('Integrations')} |
|
37 | 37 | %endif |
|
38 | 38 | </%def> |
|
39 | 39 | |
|
40 | 40 | <div class="panel panel-default"> |
|
41 | 41 | <div class="panel-heading"> |
|
42 | 42 | <h3 class="panel-title"> |
|
43 | 43 | %if c.repo: |
|
44 | 44 | ${_('Current Integrations for Repository: {repo_name}').format(repo_name=c.repo.repo_name)} |
|
45 | 45 | %elif c.repo_group: |
|
46 | 46 | ${_('Current Integrations for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} |
|
47 | 47 | %else: |
|
48 | 48 | ${_('Current Integrations')} |
|
49 | 49 | %endif |
|
50 | 50 | </h3> |
|
51 | 51 | </div> |
|
52 | 52 | <div class="panel-body"> |
|
53 | 53 | <% |
|
54 | 54 | if c.repo: |
|
55 | 55 | home_url = request.route_path('repo_integrations_home', |
|
56 | 56 | repo_name=c.repo.repo_name) |
|
57 | 57 | elif c.repo_group: |
|
58 | 58 | home_url = request.route_path('repo_group_integrations_home', |
|
59 | 59 | repo_group_name=c.repo_group.group_name) |
|
60 | 60 | else: |
|
61 | 61 | home_url = request.route_path('global_integrations_home') |
|
62 | 62 | %> |
|
63 | 63 | |
|
64 | 64 | <a href="${home_url}" class="btn ${not current_IntegrationType and 'btn-primary' or ''}">${_('All')}</a> |
|
65 | 65 | |
|
66 | 66 | %for integration_key, IntegrationType in available_integrations.items(): |
|
67 | 67 | <% |
|
68 | 68 | if c.repo: |
|
69 | 69 | list_url = request.route_path('repo_integrations_list', |
|
70 | 70 | repo_name=c.repo.repo_name, |
|
71 | 71 | integration=integration_key) |
|
72 | 72 | elif c.repo_group: |
|
73 | 73 | list_url = request.route_path('repo_group_integrations_list', |
|
74 | 74 | repo_group_name=c.repo_group.group_name, |
|
75 | 75 | integration=integration_key) |
|
76 | 76 | else: |
|
77 | 77 | list_url = request.route_path('global_integrations_list', |
|
78 | 78 | integration=integration_key) |
|
79 | 79 | %> |
|
80 | 80 | <a href="${list_url}" |
|
81 | 81 | class="btn ${current_IntegrationType and integration_key == current_IntegrationType.key and 'btn-primary' or ''}"> |
|
82 | 82 | ${IntegrationType.display_name} |
|
83 | 83 | </a> |
|
84 | 84 | %endfor |
|
85 | 85 | |
|
86 | 86 | <% |
|
87 | 87 | if c.repo: |
|
88 | 88 | create_url = h.route_path('repo_integrations_new', repo_name=c.repo.repo_name) |
|
89 | 89 | elif c.repo_group: |
|
90 | 90 | create_url = h.route_path('repo_group_integrations_new', repo_group_name=c.repo_group.group_name) |
|
91 | 91 | else: |
|
92 | 92 | create_url = h.route_path('global_integrations_new') |
|
93 | 93 | %> |
|
94 | 94 | <p class="pull-right"> |
|
95 | 95 | <a href="${create_url}" class="btn btn-small btn-success">${_(u'Create new integration')}</a> |
|
96 | 96 | </p> |
|
97 | 97 | |
|
98 | 98 | <table class="rctable integrations"> |
|
99 | 99 | <thead> |
|
100 | 100 | <tr> |
|
101 | 101 | <th><a href="?sort=enabled:${rev_sort_dir}">${_('Enabled')}</a></th> |
|
102 | 102 | <th><a href="?sort=name:${rev_sort_dir}">${_('Name')}</a></th> |
|
103 | 103 | <th colspan="2"><a href="?sort=integration_type:${rev_sort_dir}">${_('Type')}</a></th> |
|
104 | 104 | <th><a href="?sort=scope:${rev_sort_dir}">${_('Scope')}</a></th> |
|
105 | 105 | <th>${_('Actions')}</th> |
|
106 | 106 | <th></th> |
|
107 | 107 | </tr> |
|
108 | 108 | </thead> |
|
109 | 109 | <tbody> |
|
110 | 110 | %if not integrations_list: |
|
111 | 111 | <tr> |
|
112 | 112 | <td colspan="7"> |
|
113 | 113 | <% integration_type = current_IntegrationType and current_IntegrationType.display_name or '' %> |
|
114 | 114 | %if c.repo: |
|
115 | 115 | ${_('No {type} integrations for repo {repo} exist yet.').format(type=integration_type, repo=c.repo.repo_name)} |
|
116 | 116 | %elif c.repo_group: |
|
117 | 117 | ${_('No {type} integrations for repogroup {repogroup} exist yet.').format(type=integration_type, repogroup=c.repo_group.group_name)} |
|
118 | 118 | %else: |
|
119 | 119 | ${_('No {type} integrations exist yet.').format(type=integration_type)} |
|
120 | 120 | %endif |
|
121 | 121 | |
|
122 | 122 | %if current_IntegrationType: |
|
123 | 123 | <% |
|
124 | 124 | if c.repo: |
|
125 | 125 | create_url = h.route_path('repo_integrations_create', repo_name=c.repo.repo_name, integration=current_IntegrationType.key) |
|
126 | 126 | elif c.repo_group: |
|
127 | 127 | create_url = h.route_path('repo_group_integrations_create', repo_group_name=c.repo_group.group_name, integration=current_IntegrationType.key) |
|
128 | 128 | else: |
|
129 | 129 | create_url = h.route_path('global_integrations_create', integration=current_IntegrationType.key) |
|
130 | 130 | %> |
|
131 | 131 | %endif |
|
132 | 132 | |
|
133 | 133 | <a href="${create_url}">${_(u'Create one')}</a> |
|
134 | 134 | </td> |
|
135 | 135 | </tr> |
|
136 | 136 | %endif |
|
137 | 137 | %for IntegrationType, integration in integrations_list: |
|
138 | 138 | <tr id="integration_${integration.integration_id}"> |
|
139 | 139 | <td class="td-enabled"> |
|
140 | 140 | %if integration.enabled: |
|
141 | 141 | <div class="flag_status approved pull-left"></div> |
|
142 | 142 | %else: |
|
143 | 143 | <div class="flag_status rejected pull-left"></div> |
|
144 | 144 | %endif |
|
145 | 145 | </td> |
|
146 | 146 | <td class="td-description"> |
|
147 | 147 | ${integration.name} |
|
148 | 148 | </td> |
|
149 | 149 | <td class="td-icon"> |
|
150 | 150 | %if integration.integration_type in available_integrations: |
|
151 | 151 | <div class="integration-icon"> |
|
152 | 152 | ${available_integrations[integration.integration_type].icon|n} |
|
153 | 153 | </div> |
|
154 | 154 | %else: |
|
155 | 155 | ? |
|
156 | 156 | %endif |
|
157 | 157 | </td> |
|
158 | 158 | <td class="td-type"> |
|
159 | 159 | ${integration.integration_type} |
|
160 | 160 | </td> |
|
161 | 161 | <td class="td-scope"> |
|
162 | 162 | %if integration.repo: |
|
163 | 163 | <a href="${h.url('summary_home', repo_name=integration.repo.repo_name)}"> |
|
164 | 164 | ${_('repo')}:${integration.repo.repo_name} |
|
165 | 165 | </a> |
|
166 | 166 | %elif integration.repo_group: |
|
167 | 167 | <a href="${h.url('repo_group_home', group_name=integration.repo_group.group_name)}"> |
|
168 | 168 | ${_('repogroup')}:${integration.repo_group.group_name} |
|
169 | %if integration.child_repos_only: | |
|
170 | ${_('child repos only')} | |
|
171 | %else: | |
|
172 | ${_('cascade to all')} | |
|
173 | %endif | |
|
169 | 174 | </a> |
|
170 | 175 | %else: |
|
171 | %if integration.scope == 'root_repos': | |

176 | %if integration.child_repos_only: | |
|
172 | 177 | ${_('top level repos only')} |
|
173 | %elif integration.scope == 'global': | |
|
178 | %else: | |
|
174 | 179 | ${_('global')} |
|
175 | %else: | |
|
176 | ${_('unknown scope')}: ${integration.scope} | |
|
177 | 180 | %endif |
|
178 | 181 | </td> |
|
179 | 182 | %endif |
|
180 | 183 | <td class="td-action"> |
|
181 | 184 | %if not IntegrationType: |
|
182 | 185 | ${_('unknown integration')} |
|
183 | 186 | %else: |
|
184 | 187 | <% |
|
185 | 188 | if c.repo: |
|
186 | 189 | edit_url = request.route_path('repo_integrations_edit', |
|
187 | 190 | repo_name=c.repo.repo_name, |
|
188 | 191 | integration=integration.integration_type, |
|
189 | 192 | integration_id=integration.integration_id) |
|
190 | 193 | elif c.repo_group: |
|
191 | 194 | edit_url = request.route_path('repo_group_integrations_edit', |
|
192 | 195 | repo_group_name=c.repo_group.group_name, |
|
193 | 196 | integration=integration.integration_type, |
|
194 | 197 | integration_id=integration.integration_id) |
|
195 | 198 | else: |
|
196 | 199 | edit_url = request.route_path('global_integrations_edit', |
|
197 | 200 | integration=integration.integration_type, |
|
198 | 201 | integration_id=integration.integration_id) |
|
199 | 202 | %> |
|
200 | 203 | <div class="grid_edit"> |
|
201 | 204 | <a href="${edit_url}">${_('Edit')}</a> |
|
202 | 205 | </div> |
|
203 | 206 | <div class="grid_delete"> |
|
204 | 207 | <a href="${edit_url}" |
|
205 | 208 | class="btn btn-link btn-danger delete_integration_entry" |
|
206 | 209 | data-desc="${integration.name}" |
|
207 | 210 | data-uid="${integration.integration_id}"> |
|
208 | 211 | ${_('Delete')} |
|
209 | 212 | </a> |
|
210 | 213 | </div> |
|
211 | 214 | %endif |
|
212 | 215 | </td> |
|
213 | 216 | </tr> |
|
214 | 217 | %endfor |
|
215 | 218 | <tr id="last-row"></tr> |
|
216 | 219 | </tbody> |
|
217 | 220 | </table> |
|
218 | 221 | <div class="integrations-paginator"> |
|
219 | 222 | <div class="pagination-wh pagination-left"> |
|
220 | 223 | ${integrations_list.pager('$link_previous ~2~ $link_next')} |
|
221 | 224 | </div> |
|
222 | 225 | </div> |
|
223 | 226 | </div> |
|
224 | 227 | </div> |
|
225 | 228 | <script type="text/javascript"> |
|
226 | 229 | var delete_integration = function(entry) { |
|
227 | 230 | if (confirm("Confirm to remove this integration: "+$(entry).data('desc'))) { |
|
228 | 231 | var request = $.ajax({ |
|
229 | 232 | type: "POST", |
|
230 | 233 | url: $(entry).attr('href'), |
|
231 | 234 | data: { |
|
232 | 235 | 'delete': 'delete', |
|
233 | 236 | 'csrf_token': CSRF_TOKEN |
|
234 | 237 | }, |
|
235 | 238 | success: function(){ |
|
236 | 239 | location.reload(); |
|
237 | 240 | }, |
|
238 | 241 | error: function(data, textStatus, errorThrown){ |
|
239 | 242 | alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url)); |
|
240 | 243 | } |
|
241 | 244 | }); |
|
242 | 245 | }; |
|
243 | 246 | } |
|
244 | 247 | |
|
245 | 248 | $('.delete_integration_entry').on('click', function(e){ |
|
246 | 249 | e.preventDefault(); |
|
247 | 250 | delete_integration(this); |
|
248 | 251 | }); |
|
249 | 252 | </script> No newline at end of file |
@@ -1,262 +1,264 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | from webob.exc import HTTPNotFound |
|
24 | 24 | |
|
25 | 25 | import rhodecode |
|
26 | 26 | from rhodecode.model.db import Integration |
|
27 | 27 | from rhodecode.model.meta import Session |
|
28 | 28 | from rhodecode.tests import assert_session_flash, url, TEST_USER_ADMIN_LOGIN |
|
29 | 29 | from rhodecode.tests.utils import AssertResponse |
|
30 | 30 | from rhodecode.integrations import integration_type_registry |
|
31 | 31 | from rhodecode.config.routing import ADMIN_PREFIX |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
35 | 35 | class TestIntegrationsView(object): |
|
36 | 36 | pass |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class TestGlobalIntegrationsView(TestIntegrationsView): |
|
40 | 40 | def test_index_no_integrations(self, app): |
|
41 | 41 | url = ADMIN_PREFIX + '/integrations' |
|
42 | 42 | response = app.get(url) |
|
43 | 43 | |
|
44 | 44 | assert response.status_code == 200 |
|
45 | 45 | assert 'exist yet' in response.body |
|
46 | 46 | |
|
47 | 47 | def test_index_with_integrations(self, app, global_integration_stub): |
|
48 | 48 | url = ADMIN_PREFIX + '/integrations' |
|
49 | 49 | response = app.get(url) |
|
50 | 50 | |
|
51 | 51 | assert response.status_code == 200 |
|
52 | 52 | assert 'exist yet' not in response.body |
|
53 | 53 | assert global_integration_stub.name in response.body |
|
54 | 54 | |
|
55 | 55 | def test_new_integration_page(self, app): |
|
56 | 56 | url = ADMIN_PREFIX + '/integrations/new' |
|
57 | 57 | |
|
58 | 58 | response = app.get(url) |
|
59 | 59 | |
|
60 | 60 | assert response.status_code == 200 |
|
61 | 61 | |
|
62 | 62 | for integration_key in integration_type_registry: |
|
63 | 63 | nurl = (ADMIN_PREFIX + '/integrations/{integration}/new').format( |
|
64 | 64 | integration=integration_key) |
|
65 | 65 | assert nurl in response.body |
|
66 | 66 | |
|
67 | 67 | @pytest.mark.parametrize( |
|
68 | 68 | 'IntegrationType', integration_type_registry.values()) |
|
69 | 69 | def test_get_create_integration_page(self, app, IntegrationType): |
|
70 | 70 | url = ADMIN_PREFIX + '/integrations/{integration_key}/new'.format( |
|
71 | 71 | integration_key=IntegrationType.key) |
|
72 | 72 | |
|
73 | 73 | response = app.get(url) |
|
74 | 74 | |
|
75 | 75 | assert response.status_code == 200 |
|
76 | 76 | assert IntegrationType.display_name in response.body |
|
77 | 77 | |
|
78 | 78 | def test_post_integration_page(self, app, StubIntegrationType, csrf_token, |
|
79 | 79 | test_repo_group, backend_random): |
|
80 | 80 | url = ADMIN_PREFIX + '/integrations/{integration_key}/new'.format( |
|
81 | 81 | integration_key=StubIntegrationType.key) |
|
82 | 82 | |
|
83 | 83 | _post_integration_test_helper(app, url, csrf_token, admin_view=True, |
|
84 | 84 | repo=backend_random.repo, repo_group=test_repo_group) |
|
85 | 85 | |
|
86 | 86 | |
|
87 | 87 | class TestRepoGroupIntegrationsView(TestIntegrationsView): |
|
88 | 88 | def test_index_no_integrations(self, app, test_repo_group): |
|
89 | 89 | url = '/{repo_group_name}/settings/integrations'.format( |
|
90 | 90 | repo_group_name=test_repo_group.group_name) |
|
91 | 91 | response = app.get(url) |
|
92 | 92 | |
|
93 | 93 | assert response.status_code == 200 |
|
94 | 94 | assert 'exist yet' in response.body |
|
95 | 95 | |
|
96 | 96 | def test_index_with_integrations(self, app, test_repo_group, |
|
97 | 97 | repogroup_integration_stub): |
|
98 | 98 | url = '/{repo_group_name}/settings/integrations'.format( |
|
99 | 99 | repo_group_name=test_repo_group.group_name) |
|
100 | 100 | |
|
101 | 101 | stub_name = repogroup_integration_stub.name |
|
102 | 102 | response = app.get(url) |
|
103 | 103 | |
|
104 | 104 | assert response.status_code == 200 |
|
105 | 105 | assert 'exist yet' not in response.body |
|
106 | 106 | assert stub_name in response.body |
|
107 | 107 | |
|
108 | 108 | def test_new_integration_page(self, app, test_repo_group): |
|
109 | 109 | repo_group_name = test_repo_group.group_name |
|
110 | 110 | url = '/{repo_group_name}/settings/integrations/new'.format( |
|
111 | 111 | repo_group_name=test_repo_group.group_name) |
|
112 | 112 | |
|
113 | 113 | response = app.get(url) |
|
114 | 114 | |
|
115 | 115 | assert response.status_code == 200 |
|
116 | 116 | |
|
117 | 117 | for integration_key in integration_type_registry: |
|
118 | 118 | nurl = ('/{repo_group_name}/settings/integrations' |
|
119 | 119 | '/{integration}/new').format( |
|
120 | 120 | repo_group_name=repo_group_name, |
|
121 | 121 | integration=integration_key) |
|
122 | 122 | |
|
123 | 123 | assert nurl in response.body |
|
124 | 124 | |
|
125 | 125 | @pytest.mark.parametrize( |
|
126 | 126 | 'IntegrationType', integration_type_registry.values()) |
|
127 | 127 | def test_get_create_integration_page(self, app, test_repo_group, |
|
128 | 128 | IntegrationType): |
|
129 | 129 | repo_group_name = test_repo_group.group_name |
|
130 | 130 | url = ('/{repo_group_name}/settings/integrations/{integration_key}/new' |
|
131 | 131 | ).format(repo_group_name=repo_group_name, |
|
132 | 132 | integration_key=IntegrationType.key) |
|
133 | 133 | |
|
134 | 134 | response = app.get(url) |
|
135 | 135 | |
|
136 | 136 | assert response.status_code == 200 |
|
137 | 137 | assert IntegrationType.display_name in response.body |
|
138 | 138 | |
|
139 | 139 | def test_post_integration_page(self, app, test_repo_group, backend_random, |
|
140 | 140 | StubIntegrationType, csrf_token): |
|
141 | 141 | repo_group_name = test_repo_group.group_name |
|
142 | 142 | url = ('/{repo_group_name}/settings/integrations/{integration_key}/new' |
|
143 | 143 | ).format(repo_group_name=repo_group_name, |
|
144 | 144 | integration_key=StubIntegrationType.key) |
|
145 | 145 | |
|
146 | 146 | _post_integration_test_helper(app, url, csrf_token, admin_view=False, |
|
147 | 147 | repo=backend_random.repo, repo_group=test_repo_group) |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | class TestRepoIntegrationsView(TestIntegrationsView): |
|
151 | 151 | def test_index_no_integrations(self, app, backend_random): |
|
152 | 152 | url = '/{repo_name}/settings/integrations'.format( |
|
153 | 153 | repo_name=backend_random.repo.repo_name) |
|
154 | 154 | response = app.get(url) |
|
155 | 155 | |
|
156 | 156 | assert response.status_code == 200 |
|
157 | 157 | assert 'exist yet' in response.body |
|
158 | 158 | |
|
159 | 159 | def test_index_with_integrations(self, app, repo_integration_stub): |
|
160 | 160 | url = '/{repo_name}/settings/integrations'.format( |
|
161 | 161 | repo_name=repo_integration_stub.repo.repo_name) |
|
162 | 162 | stub_name = repo_integration_stub.name |
|
163 | 163 | |
|
164 | 164 | response = app.get(url) |
|
165 | 165 | |
|
166 | 166 | assert response.status_code == 200 |
|
167 | 167 | assert stub_name in response.body |
|
168 | 168 | assert 'exist yet' not in response.body |
|
169 | 169 | |
|
170 | 170 | def test_new_integration_page(self, app, backend_random): |
|
171 | 171 | repo_name = backend_random.repo.repo_name |
|
172 | 172 | url = '/{repo_name}/settings/integrations/new'.format( |
|
173 | 173 | repo_name=repo_name) |
|
174 | 174 | |
|
175 | 175 | response = app.get(url) |
|
176 | 176 | |
|
177 | 177 | assert response.status_code == 200 |
|
178 | 178 | |
|
179 | 179 | for integration_key in integration_type_registry: |
|
180 | 180 | nurl = ('/{repo_name}/settings/integrations' |
|
181 | 181 | '/{integration}/new').format( |
|
182 | 182 | repo_name=repo_name, |
|
183 | 183 | integration=integration_key) |
|
184 | 184 | |
|
185 | 185 | assert nurl in response.body |
|
186 | 186 | |
|
187 | 187 | @pytest.mark.parametrize( |
|
188 | 188 | 'IntegrationType', integration_type_registry.values()) |
|
189 | 189 | def test_get_create_integration_page(self, app, backend_random, |
|
190 | 190 | IntegrationType): |
|
191 | 191 | repo_name = backend_random.repo.repo_name |
|
192 | 192 | url = '/{repo_name}/settings/integrations/{integration_key}/new'.format( |
|
193 | 193 | repo_name=repo_name, integration_key=IntegrationType.key) |
|
194 | 194 | |
|
195 | 195 | response = app.get(url) |
|
196 | 196 | |
|
197 | 197 | assert response.status_code == 200 |
|
198 | 198 | assert IntegrationType.display_name in response.body |
|
199 | 199 | |
|
200 | 200 | def test_post_integration_page(self, app, backend_random, test_repo_group, |
|
201 | 201 | StubIntegrationType, csrf_token): |
|
202 | 202 | repo_name = backend_random.repo.repo_name |
|
203 | 203 | url = '/{repo_name}/settings/integrations/{integration_key}/new'.format( |
|
204 | 204 | repo_name=repo_name, integration_key=StubIntegrationType.key) |
|
205 | 205 | |
|
206 | 206 | _post_integration_test_helper(app, url, csrf_token, admin_view=False, |
|
207 | 207 | repo=backend_random.repo, repo_group=test_repo_group) |
|
208 | 208 | |
|
209 | 209 | |
|
210 | 210 | def _post_integration_test_helper(app, url, csrf_token, repo, repo_group, |
|
211 | 211 | admin_view): |
|
212 | 212 | """ |
|
213 | 213 | Posts form data to create integration at the url given then deletes it and |
|
214 | 214 | checks if the redirect url is correct. |
|
215 | 215 | """ |
|
216 | 216 | |
|
217 | 217 | app.post(url, params={}, status=403) # missing csrf check |
|
218 | 218 | response = app.post(url, params={'csrf_token': csrf_token}) |
|
219 | 219 | assert response.status_code == 200 |
|
220 | 220 | assert 'Errors exist' in response.body |
|
221 | 221 | |
|
222 | 222 | scopes_destinations = [ |
|
223 | 223 | ('global', |
|
224 | 224 | ADMIN_PREFIX + '/integrations'), |
|
225 | ('root_repos', | |
|
225 | ('root-repos', | |
|
226 | 226 | ADMIN_PREFIX + '/integrations'), |
|
227 | 227 | ('repo:%s' % repo.repo_name, |
|
228 | 228 | '/%s/settings/integrations' % repo.repo_name), |
|
229 | 229 | ('repogroup:%s' % repo_group.group_name, |
|
230 | 230 | '/%s/settings/integrations' % repo_group.group_name), |
|
231 | ('repogroup-recursive:%s' % repo_group.group_name, | |
|
232 | '/%s/settings/integrations' % repo_group.group_name), | |
|
231 | 233 | ] |
|
232 | 234 | |
|
233 | 235 | for scope, destination in scopes_destinations: |
|
234 | 236 | if admin_view: |
|
235 | 237 | destination = ADMIN_PREFIX + '/integrations' |
|
236 | 238 | |
|
237 | 239 | form_data = [ |
|
238 | 240 | ('csrf_token', csrf_token), |
|
239 | 241 | ('__start__', 'options:mapping'), |
|
240 | 242 | ('name', 'test integration'), |
|
241 | 243 | ('scope', scope), |
|
242 | 244 | ('enabled', 'true'), |
|
243 | 245 | ('__end__', 'options:mapping'), |
|
244 | 246 | ('__start__', 'settings:mapping'), |
|
245 | 247 | ('test_int_field', '34'), |
|
246 | 248 | ('test_string_field', ''), # empty value on purpose as it's required |
|
247 | 249 | ('__end__', 'settings:mapping'), |
|
248 | 250 | ] |
|
249 | 251 | errors_response = app.post(url, form_data) |
|
250 | 252 | assert 'Errors exist' in errors_response.body |
|
251 | 253 | |
|
252 | 254 | form_data[-2] = ('test_string_field', 'data!') |
|
253 | 255 | assert Session().query(Integration).count() == 0 |
|
254 | 256 | created_response = app.post(url, form_data) |
|
255 | 257 | assert Session().query(Integration).count() == 1 |
|
256 | 258 | |
|
257 | 259 | delete_response = app.post( |
|
258 | 260 | created_response.location, |
|
259 | 261 | params={'csrf_token': csrf_token, 'delete': 'delete'}) |
|
260 | 262 | |
|
261 | 263 | assert Session().query(Integration).count() == 0 |
|
262 | 264 | assert delete_response.location.endswith(destination) |
@@ -1,192 +1,222 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | from rhodecode import events |
|
25 | 25 | from rhodecode.tests.fixture import Fixture |
|
26 | 26 | from rhodecode.model.db import Session, Integration |
|
27 | 27 | from rhodecode.model.integration import IntegrationModel |
|
28 | 28 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | class TestDeleteScopesDeletesIntegrations(object): |
|
32 | 32 | def test_delete_repo_with_integration_deletes_integration(self, |
|
33 | 33 | repo_integration_stub): |
|
34 | 34 | Session().delete(repo_integration_stub.repo) |
|
35 | 35 | Session().commit() |
|
36 | 36 | Session().expire_all() |
|
37 | 37 | integration = Integration.get(repo_integration_stub.integration_id) |
|
38 | 38 | assert integration is None |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | def test_delete_repo_group_with_integration_deletes_integration(self, |
|
42 | 42 | repogroup_integration_stub): |
|
43 | 43 | Session().delete(repogroup_integration_stub.repo_group) |
|
44 | 44 | Session().commit() |
|
45 | 45 | Session().expire_all() |
|
46 | 46 | integration = Integration.get(repogroup_integration_stub.integration_id) |
|
47 | 47 | assert integration is None |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | @pytest.fixture |
|
51 | 51 | def integration_repos(request, StubIntegrationType, stub_integration_settings): |
|
52 | 52 | """ |
|
53 | 53 | Create repositories and integrations for testing, and destroy them after |
|
54 | ||
|
55 | Structure: | |
|
56 | root_repo | |
|
57 | parent_group/ | |
|
58 | parent_repo | |
|
59 | child_group/ | |
|
60 | child_repo | |
|
61 | other_group/ | |
|
62 | other_repo | |
|
54 | 63 | """ |
|
55 | 64 | fixture = Fixture() |
|
56 | 65 | |
|
57 | repo_group_1_id = 'int_test_repo_group_1_%s' % time.time() | |
|
58 | repo_group_1 = fixture.create_repo_group(repo_group_1_id) | |
|
59 | repo_group_2_id = 'int_test_repo_group_2_%s' % time.time() | |
|
60 | repo_group_2 = fixture.create_repo_group(repo_group_2_id) | |
|
66 | ||
|
67 | parent_group_id = 'int_test_parent_group_%s' % time.time() | |
|
68 | parent_group = fixture.create_repo_group(parent_group_id) | |
|
69 | ||
|
70 | other_group_id = 'int_test_other_group_%s' % time.time() | |
|
71 | other_group = fixture.create_repo_group(other_group_id) | |
|
61 | 72 | |
|
62 | repo_1_id = 'int_test_repo_1_%s' % time.time() | |
|
63 | repo_1 = fixture.create_repo(repo_1_id, repo_group=repo_group_1) | |
|
64 | repo_2_id = 'int_test_repo_2_%s' % time.time() | |
|
65 | repo_2 = fixture.create_repo(repo_2_id, repo_group=repo_group_2) | |
|
73 | child_group_id = ( | |
|
74 | parent_group_id + '/' + 'int_test_child_group_%s' % time.time()) | |
|
75 | child_group = fixture.create_repo_group(child_group_id) | |
|
76 | ||
|
77 | parent_repo_id = 'int_test_parent_repo_%s' % time.time() | |
|
78 | parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group) | |
|
79 | ||
|
80 | child_repo_id = 'int_test_child_repo_%s' % time.time() | |
|
81 | child_repo = fixture.create_repo(child_repo_id, repo_group=child_group) | |
|
82 | ||
|
83 | other_repo_id = 'int_test_other_repo_%s' % time.time() | |
|
84 | other_repo = fixture.create_repo(other_repo_id, repo_group=other_group) | |
|
66 | 85 | |
|
67 | 86 | root_repo_id = 'int_test_repo_root_%s' % time.time() |
|
68 | 87 | root_repo = fixture.create_repo(root_repo_id) |
|
69 | 88 | |
|
70 | integration_global = IntegrationModel().create( | |
|
71 | StubIntegrationType, settings=stub_integration_settings, | |
|
72 | enabled=True, name='test global integration', scope='global') | |
|
73 | integration_root_repos = IntegrationModel().create( | |
|
74 | StubIntegrationType, settings=stub_integration_settings, | |
|
75 | enabled=True, name='test root repos integration', scope='root_repos') | |
|
76 | integration_repo_1 = IntegrationModel().create( | |
|
89 | integrations = {} | |
|
90 | for name, repo, repo_group, child_repos_only in [ | |
|
91 | ('global', None, None, None), | |
|
92 | ('root_repos', None, None, True), | |
|
93 | ('parent_repo', parent_repo, None, None), | |
|
94 | ('child_repo', child_repo, None, None), | |
|
95 | ('other_repo', other_repo, None, None), | |
|
96 | ('root_repo', root_repo, None, None), | |
|
97 | ('parent_group', None, parent_group, True), | |
|
98 | ('parent_group_recursive', None, parent_group, False), | |
|
99 | ('child_group', None, child_group, True), | |
|
100 | ('child_group_recursive', None, child_group, False), | |
|
101 | ('other_group', None, other_group, True), | |
|
102 | ('other_group_recursive', None, other_group, False), | |
|
103 | ]: | |
|
104 | integrations[name] = IntegrationModel().create( | |
|
77 | 105 | StubIntegrationType, settings=stub_integration_settings, |
|
78 | enabled=True, name='test repo 1 integration', scope=repo_1) | |
|
79 | integration_repo_group_1 = IntegrationModel().create( | |
|
80 | StubIntegrationType, settings=stub_integration_settings, | |
|
81 | enabled=True, name='test repo group 1 integration', scope=repo_group_1) | |
|
82 | integration_repo_2 = IntegrationModel().create( | |
|
83 | StubIntegrationType, settings=stub_integration_settings, | |
|
84 | enabled=True, name='test repo 2 integration', scope=repo_2) | |
|
85 | integration_repo_group_2 = IntegrationModel().create( | |
|
86 | StubIntegrationType, settings=stub_integration_settings, | |
|
87 | enabled=True, name='test repo group 2 integration', scope=repo_group_2) | |
|
106 | enabled=True, name='test %s integration' % name, | |
|
107 | repo=repo, repo_group=repo_group, child_repos_only=child_repos_only) | |
|
88 | 108 | |
|
89 | 109 | Session().commit() |
|
90 | 110 | |
|
91 | 111 | def _cleanup(): |
|
92 | Session().delete(integration_global) | |
|
93 | Session().delete(integration_root_repos) | |
|
94 | Session().delete(integration_repo_1) | |
|
95 | Session().delete(integration_repo_group_1) | |
|
96 | Session().delete(integration_repo_2) | |
|
97 | Session().delete(integration_repo_group_2) | |
|
112 | for integration in integrations.values(): | |
|
113 | Session.delete(integration) | |
|
114 | ||
|
98 | 115 | fixture.destroy_repo(root_repo) |
|
99 | fixture.destroy_repo(repo_1) | |

100 | fixture.destroy_repo(repo_2) | |

101 | fixture.destroy_repo_group(repo_group_1) | |

102 | fixture.destroy_repo_group(repo_group_2) | |
|
116 | fixture.destroy_repo(child_repo) | |
|
117 | fixture.destroy_repo(parent_repo) | |
|
118 | fixture.destroy_repo(other_repo) | |
|
119 | fixture.destroy_repo_group(child_group) | |
|
120 | fixture.destroy_repo_group(parent_group) | |
|
121 | fixture.destroy_repo_group(other_group) | |
|
103 | 122 | |
|
104 | 123 | request.addfinalizer(_cleanup) |
|
105 | 124 | |
|
106 | 125 | return { |
|
126 | 'integrations': integrations, | |
|
107 | 127 | 'repos': { |
|
108 | 'repo_1': repo_1, | |
|
109 | 'repo_2': repo_2, | |
|
110 | 128 | 'root_repo': root_repo, |
|
111 | }, | |
|
112 | 'repo_groups': { | |
|
113 | 'repo_group_1': repo_group_1, | |
|
114 | 'repo_group_2': repo_group_2, | |
|
115 | }, | |
|
116 | 'integrations': { | |
|
117 | 'global': integration_global, | |
|
118 | 'root_repos': integration_root_repos, | |
|
119 | 'repo_1': integration_repo_1, | |
|
120 | 'repo_2': integration_repo_2, | |
|
121 | 'repo_group_1': integration_repo_group_1, | |
|
122 | 'repo_group_2': integration_repo_group_2, | |
|
129 | 'other_repo': other_repo, | |
|
130 | 'parent_repo': parent_repo, | |
|
131 | 'child_repo': child_repo, | |
|
123 | 132 | } |
|
124 | 133 | } |
|
125 | 134 | |
|
126 | 135 | |
|
127 | 136 | def test_enabled_integration_repo_scopes(integration_repos): |
|
128 | 137 | integrations = integration_repos['integrations'] |
|
129 | 138 | repos = integration_repos['repos'] |
|
130 | 139 | |
|
131 | 140 | triggered_integrations = IntegrationModel().get_for_event( |
|
132 | 141 | events.RepoEvent(repos['root_repo'])) |
|
133 | 142 | |
|
134 | 143 | assert triggered_integrations == [ |
|
135 | 144 | integrations['global'], |
|
136 | integrations['root_repos'] | |
|
145 | integrations['root_repos'], | |
|
146 | integrations['root_repo'], | |
|
147 | ] | |
|
148 | ||
|
149 | ||
|
150 | triggered_integrations = IntegrationModel().get_for_event( | |
|
151 | events.RepoEvent(repos['other_repo'])) | |
|
152 | ||
|
153 | assert triggered_integrations == [ | |
|
154 | integrations['global'], | |
|
155 | integrations['other_repo'], | |
|
156 | integrations['other_group'], | |
|
157 | integrations['other_group_recursive'], | |
|
137 | 158 | ] |
|
138 | 159 | |
|
139 | 160 | |
|
140 | 161 | triggered_integrations = IntegrationModel().get_for_event( |
|
141 | events.RepoEvent(repos['repo_1'])) | |
|
162 | events.RepoEvent(repos['parent_repo'])) | |
|
142 | 163 | |
|
143 | 164 | assert triggered_integrations == [ |
|
144 | 165 | integrations['global'], |
|
145 | integrations['repo_1'], | |

146 | integrations['repo_group_1'], | |
|
166 | integrations['parent_repo'], | |
|
167 | integrations['parent_group'], | |
|
168 | integrations['parent_group_recursive'], | |
|
147 | 169 | ] |
|
148 | 170 | |
|
149 | ||
|
150 | 171 | triggered_integrations = IntegrationModel().get_for_event( |
|
151 | events.RepoEvent(repos['repo_2'])) | |
|
172 | events.RepoEvent(repos['child_repo'])) | |
|
152 | 173 | |
|
153 | 174 | assert triggered_integrations == [ |
|
154 | 175 | integrations['global'], |
|
155 | integrations['repo_2'], | |

156 | integrations['repo_group_2'], | |
|
176 | integrations['child_repo'], | |
|
177 | integrations['parent_group_recursive'], | |
|
178 | integrations['child_group'], | |
|
179 | integrations['child_group_recursive'], | |
|
157 | 180 | ] |
|
158 | 181 | |
|
159 | 182 | |
|
160 | 183 | def test_disabled_integration_repo_scopes(integration_repos): |
|
161 | 184 | integrations = integration_repos['integrations'] |
|
162 | 185 | repos = integration_repos['repos'] |
|
163 | 186 | |
|
164 | 187 | for integration in integrations.values(): |
|
165 | 188 | integration.enabled = False |
|
166 | 189 | Session().commit() |
|
167 | 190 | |
|
168 | 191 | triggered_integrations = IntegrationModel().get_for_event( |
|
169 | 192 | events.RepoEvent(repos['root_repo'])) |
|
170 | 193 | |
|
171 | 194 | assert triggered_integrations == [] |
|
172 | 195 | |
|
173 | 196 | |
|
174 | 197 | triggered_integrations = IntegrationModel().get_for_event( |
|
175 | events.RepoEvent(repos['repo_1'])) | |
|
198 | events.RepoEvent(repos['parent_repo'])) | |
|
176 | 199 | |
|
177 | 200 | assert triggered_integrations == [] |
|
178 | 201 | |
|
179 | 202 | |
|
180 | 203 | triggered_integrations = IntegrationModel().get_for_event( |
|
181 | events.RepoEvent(repos['repo_2'])) | |
|
204 | events.RepoEvent(repos['child_repo'])) | |
|
182 | 205 | |
|
183 | 206 | assert triggered_integrations == [] |
|
184 | 207 | |
|
185 | 208 | |
|
209 | triggered_integrations = IntegrationModel().get_for_event( | |
|
210 | events.RepoEvent(repos['other_repo'])) | |
|
211 | ||
|
212 | assert triggered_integrations == [] | |
|
213 | ||
|
214 | ||
|
215 | ||
|
186 | 216 | def test_enabled_non_repo_integrations(integration_repos): |
|
187 | 217 | integrations = integration_repos['integrations'] |
|
188 | 218 | |
|
189 | 219 | triggered_integrations = IntegrationModel().get_for_event( |
|
190 | 220 | events.UserPreCreate({})) |
|
191 | 221 | |
|
192 | 222 | assert triggered_integrations == [integrations['global']] |
@@ -1,120 +1,171 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | from rhodecode.model import validation_schema |
|
25 | 25 | |
|
26 | 26 | from rhodecode.integrations import integration_type_registry |
|
27 | 27 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
28 | 28 | from rhodecode.model.validation_schema.schemas.integration_schema import ( |
|
29 | 29 | make_integration_schema |
|
30 | 30 | ) |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
34 | 34 | class TestIntegrationSchema(object): |
|
35 | 35 | |
|
36 | 36 | def test_deserialize_integration_schema_perms(self, backend_random, |
|
37 | 37 | test_repo_group, |
|
38 | 38 | StubIntegrationType): |
|
39 | 39 | |
|
40 | 40 | repo = backend_random.repo |
|
41 | 41 | repo_group = test_repo_group |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | empty_perms_dict = { |
|
45 | 45 | 'global': [], |
|
46 | 46 | 'repositories': {}, |
|
47 | 47 | 'repositories_groups': {}, |
|
48 | 48 | } |
|
49 | 49 | |
|
50 | perms_tests = { | |
|
51 | ('repo:%s' % repo.repo_name, repo): [ | |
|
50 | perms_tests = [ | |
|
51 | ( | |
|
52 | 'repo:%s' % repo.repo_name, | |
|
53 | { | |
|
54 | 'child_repos_only': None, | |
|
55 | 'repo_group': None, | |
|
56 | 'repo': repo, | |
|
57 | }, | |
|
58 | [ | |
|
52 | 59 | ({}, False), |
|
53 | 60 | ({'global': ['hg.admin']}, True), |
|
54 | 61 | ({'global': []}, False), |
|
55 | 62 | ({'repositories': {repo.repo_name: 'repository.admin'}}, True), |
|
56 | 63 | ({'repositories': {repo.repo_name: 'repository.read'}}, False), |
|
57 | 64 | ({'repositories': {repo.repo_name: 'repository.write'}}, False), |
|
58 | 65 | ({'repositories': {repo.repo_name: 'repository.none'}}, False), |
|
59 | ], | |
|
60 | ('repogroup:%s' % repo_group.group_name, repo_group): [ | |
|
66 | ] | |
|
67 | ), | |
|
68 | ( | |
|
69 | 'repogroup:%s' % repo_group.group_name, | |
|
70 | { | |
|
71 | 'repo': None, | |
|
72 | 'repo_group': repo_group, | |
|
73 | 'child_repos_only': True, | |
|
74 | }, | |
|
75 | [ | |
|
61 | 76 | ({}, False), |
|
62 | 77 | ({'global': ['hg.admin']}, True), |
|
63 | 78 | ({'global': []}, False), |
|
64 | 79 | ({'repositories_groups': |
|
65 | 80 | {repo_group.group_name: 'group.admin'}}, True), |
|
66 | 81 | ({'repositories_groups': |
|
67 | 82 | {repo_group.group_name: 'group.read'}}, False), |
|
68 | 83 | ({'repositories_groups': |
|
69 | 84 | {repo_group.group_name: 'group.write'}}, False), |
|
70 | 85 | ({'repositories_groups': |
|
71 | 86 | {repo_group.group_name: 'group.none'}}, False), |
|
72 | ], | |
|
73 | ('global', 'global'): [ | |
|
87 | ] | |
|
88 | ), | |
|
89 | ( | |
|
90 | 'repogroup-recursive:%s' % repo_group.group_name, | |
|
91 | { | |
|
92 | 'repo': None, | |
|
93 | 'repo_group': repo_group, | |
|
94 | 'child_repos_only': False, | |
|
95 | }, | |
|
96 | [ | |
|
74 | 97 | ({}, False), |
|
75 | 98 | ({'global': ['hg.admin']}, True), |
|
76 | 99 | ({'global': []}, False), |
|
77 | ], | |
|
78 | ('root_repos', 'root_repos'): [ | |
|
100 | ({'repositories_groups': | |
|
101 | {repo_group.group_name: 'group.admin'}}, True), | |
|
102 | ({'repositories_groups': | |
|
103 | {repo_group.group_name: 'group.read'}}, False), | |
|
104 | ({'repositories_groups': | |
|
105 | {repo_group.group_name: 'group.write'}}, False), | |
|
106 | ({'repositories_groups': | |
|
107 | {repo_group.group_name: 'group.none'}}, False), | |
|
108 | ] | |
|
109 | ), | |
|
110 | ( | |
|
111 | 'global', | |
|
112 | { | |
|
113 | 'repo': None, | |
|
114 | 'repo_group': None, | |
|
115 | 'child_repos_only': False, | |
|
116 | }, [ | |
|
79 | 117 | ({}, False), |
|
80 | 118 | ({'global': ['hg.admin']}, True), |
|
81 | 119 | ({'global': []}, False), |
|
82 | ], | |

83 | } | |
|
120 | ] | |
|
121 | ), | |
|
122 | ( | |
|
123 | 'root-repos', | |
|
124 | { | |
|
125 | 'repo': None, | |
|
126 | 'repo_group': None, | |
|
127 | 'child_repos_only': True, | |
|
128 | }, [ | |
|
129 | ({}, False), | |
|
130 | ({'global': ['hg.admin']}, True), | |
|
131 | ({'global': []}, False), | |
|
132 | ] | |
|
133 | ), | |
|
134 | ] | |
|
84 | 135 | |
|
85 | for (scope_input, scope_output), perms_allowed in perms_tests.items(): | |
|
136 | for scope_input, scope_output, perms_allowed in perms_tests: | |
|
86 | 137 | for perms_update, allowed in perms_allowed: |
|
87 | 138 | perms = dict(empty_perms_dict, **perms_update) |
|
88 | 139 | |
|
89 | 140 | schema = make_integration_schema( |
|
90 | 141 | IntegrationType=StubIntegrationType |
|
91 | 142 | ).bind(permissions=perms) |
|
92 | 143 | |
|
93 | 144 | input_data = { |
|
94 | 145 | 'options': { |
|
95 | 146 | 'enabled': 'true', |
|
96 | 147 | 'scope': scope_input, |
|
97 | 148 | 'name': 'test integration', |
|
98 | 149 | }, |
|
99 | 150 | 'settings': { |
|
100 | 151 | 'test_string_field': 'stringy', |
|
101 | 152 | 'test_int_field': '100', |
|
102 | 153 | } |
|
103 | 154 | } |
|
104 | 155 | |
|
105 | 156 | if not allowed: |
|
106 | 157 | with pytest.raises(colander.Invalid): |
|
107 | 158 | schema.deserialize(input_data) |
|
108 | 159 | else: |
|
109 | 160 | assert schema.deserialize(input_data) == { |
|
110 | 161 | 'options': { |
|
111 | 162 | 'enabled': True, |
|
112 | 163 | 'scope': scope_output, |
|
113 | 164 | 'name': 'test integration', |
|
114 | 165 | }, |
|
115 | 166 | 'settings': { |
|
116 | 167 | 'test_string_field': 'stringy', |
|
117 | 168 | 'test_int_field': 100, |
|
118 | 169 | } |
|
119 | 170 | } |
|
120 | 171 |
@@ -1,1760 +1,1779 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import collections |
|
22 | 22 | import datetime |
|
23 | 23 | import hashlib |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | import pprint |
|
27 | 27 | import shutil |
|
28 | 28 | import socket |
|
29 | 29 | import subprocess |
|
30 | 30 | import time |
|
31 | 31 | import uuid |
|
32 | 32 | |
|
33 | 33 | import mock |
|
34 | 34 | import pyramid.testing |
|
35 | 35 | import pytest |
|
36 | 36 | import colander |
|
37 | 37 | import requests |
|
38 | 38 | from webtest.app import TestApp |
|
39 | 39 | |
|
40 | 40 | import rhodecode |
|
41 | 41 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
42 | 42 | from rhodecode.model.comment import ChangesetCommentsModel |
|
43 | 43 | from rhodecode.model.db import ( |
|
44 | 44 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
45 | 45 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, Integration) |
|
46 | 46 | from rhodecode.model.meta import Session |
|
47 | 47 | from rhodecode.model.pull_request import PullRequestModel |
|
48 | 48 | from rhodecode.model.repo import RepoModel |
|
49 | 49 | from rhodecode.model.repo_group import RepoGroupModel |
|
50 | 50 | from rhodecode.model.user import UserModel |
|
51 | 51 | from rhodecode.model.settings import VcsSettingsModel |
|
52 | 52 | from rhodecode.model.user_group import UserGroupModel |
|
53 | 53 | from rhodecode.model.integration import IntegrationModel |
|
54 | 54 | from rhodecode.integrations import integration_type_registry |
|
55 | 55 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
56 | 56 | from rhodecode.lib.utils import repo2db_mapper |
|
57 | 57 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
58 | 58 | from rhodecode.lib.vcs.backends import get_backend |
|
59 | 59 | from rhodecode.lib.vcs.nodes import FileNode |
|
60 | 60 | from rhodecode.tests import ( |
|
61 | 61 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, |
|
62 | 62 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, |
|
63 | 63 | TEST_USER_REGULAR_PASS) |
|
64 | 64 | from rhodecode.tests.fixture import Fixture |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | def _split_comma(value): |
|
68 | 68 | return value.split(',') |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | def pytest_addoption(parser): |
|
72 | 72 | parser.addoption( |
|
73 | 73 | '--keep-tmp-path', action='store_true', |
|
74 | 74 | help="Keep the test temporary directories") |
|
75 | 75 | parser.addoption( |
|
76 | 76 | '--backends', action='store', type=_split_comma, |
|
77 | 77 | default=['git', 'hg', 'svn'], |
|
78 | 78 | help="Select which backends to test for backend specific tests.") |
|
79 | 79 | parser.addoption( |
|
80 | 80 | '--dbs', action='store', type=_split_comma, |
|
81 | 81 | default=['sqlite'], |
|
82 | 82 | help="Select which database to test for database specific tests. " |
|
83 | 83 | "Possible options are sqlite,postgres,mysql") |
|
84 | 84 | parser.addoption( |
|
85 | 85 | '--appenlight', '--ae', action='store_true', |
|
86 | 86 | help="Track statistics in appenlight.") |
|
87 | 87 | parser.addoption( |
|
88 | 88 | '--appenlight-api-key', '--ae-key', |
|
89 | 89 | help="API key for Appenlight.") |
|
90 | 90 | parser.addoption( |
|
91 | 91 | '--appenlight-url', '--ae-url', |
|
92 | 92 | default="https://ae.rhodecode.com", |
|
93 | 93 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") |
|
94 | 94 | parser.addoption( |
|
95 | 95 | '--sqlite-connection-string', action='store', |
|
96 | 96 | default='', help="Connection string for the dbs tests with SQLite") |
|
97 | 97 | parser.addoption( |
|
98 | 98 | '--postgres-connection-string', action='store', |
|
99 | 99 | default='', help="Connection string for the dbs tests with Postgres") |
|
100 | 100 | parser.addoption( |
|
101 | 101 | '--mysql-connection-string', action='store', |
|
102 | 102 | default='', help="Connection string for the dbs tests with MySQL") |
|
103 | 103 | parser.addoption( |
|
104 | 104 | '--repeat', type=int, default=100, |
|
105 | 105 | help="Number of repetitions in performance tests.") |
|
106 | 106 | |
|
107 | 107 | |
|
108 | 108 | def pytest_configure(config): |
|
109 | 109 | # Apply the kombu patch early on, needed for test discovery on Python 2.7.11 |
|
110 | 110 | from rhodecode.config import patches |
|
111 | 111 | patches.kombu_1_5_1_python_2_7_11() |
|
112 | 112 | |
|
113 | 113 | |
|
114 | 114 | def pytest_collection_modifyitems(session, config, items): |
|
115 | 115 | # nottest marked, compare nose, used for transition from nose to pytest |
|
116 | 116 | remaining = [ |
|
117 | 117 | i for i in items if getattr(i.obj, '__test__', True)] |
|
118 | 118 | items[:] = remaining |
|
119 | 119 | |
|
120 | 120 | |
|
121 | 121 | def pytest_generate_tests(metafunc): |
|
122 | 122 | # Support test generation based on --backend parameter |
|
123 | 123 | if 'backend_alias' in metafunc.fixturenames: |
|
124 | 124 | backends = get_backends_from_metafunc(metafunc) |
|
125 | 125 | scope = None |
|
126 | 126 | if not backends: |
|
127 | 127 | pytest.skip("Not enabled for any of selected backends") |
|
128 | 128 | metafunc.parametrize('backend_alias', backends, scope=scope) |
|
129 | 129 | elif hasattr(metafunc.function, 'backends'): |
|
130 | 130 | backends = get_backends_from_metafunc(metafunc) |
|
131 | 131 | if not backends: |
|
132 | 132 | pytest.skip("Not enabled for any of selected backends") |
|
133 | 133 | |
|
134 | 134 | |
|
135 | 135 | def get_backends_from_metafunc(metafunc): |
|
136 | 136 | requested_backends = set(metafunc.config.getoption('--backends')) |
|
137 | 137 | if hasattr(metafunc.function, 'backends'): |
|
138 | 138 | # Supported backends by this test function, created from |
|
139 | 139 | # pytest.mark.backends |
|
140 | 140 | backends = metafunc.function.backends.args |
|
141 | 141 | elif hasattr(metafunc.cls, 'backend_alias'): |
|
142 | 142 | # Support class attribute "backend_alias", this is mainly |
|
143 | 143 | # for legacy reasons for tests not yet using pytest.mark.backends |
|
144 | 144 | backends = [metafunc.cls.backend_alias] |
|
145 | 145 | else: |
|
146 | 146 | backends = metafunc.config.getoption('--backends') |
|
147 | 147 | return requested_backends.intersection(backends) |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | @pytest.fixture(scope='session', autouse=True) |
|
151 | 151 | def activate_example_rcextensions(request): |
|
152 | 152 | """ |
|
153 | 153 | Patch in an example rcextensions module which verifies passed in kwargs. |
|
154 | 154 | """ |
|
155 | 155 | from rhodecode.tests.other import example_rcextensions |
|
156 | 156 | |
|
157 | 157 | old_extensions = rhodecode.EXTENSIONS |
|
158 | 158 | rhodecode.EXTENSIONS = example_rcextensions |
|
159 | 159 | |
|
160 | 160 | @request.addfinalizer |
|
161 | 161 | def cleanup(): |
|
162 | 162 | rhodecode.EXTENSIONS = old_extensions |
|
163 | 163 | |
|
164 | 164 | |
|
165 | 165 | @pytest.fixture |
|
166 | 166 | def capture_rcextensions(): |
|
167 | 167 | """ |
|
168 | 168 | Returns the recorded calls to entry points in rcextensions. |
|
169 | 169 | """ |
|
170 | 170 | calls = rhodecode.EXTENSIONS.calls |
|
171 | 171 | calls.clear() |
|
172 | 172 | # Note: At this moment, it is still the empty dict, but that will |
|
173 | 173 | # be filled during the test run and since it is a reference this |
|
174 | 174 | # is enough to make it work. |
|
175 | 175 | return calls |
|
176 | 176 | |
|
177 | 177 | |
|
178 | 178 | @pytest.fixture(scope='session') |
|
179 | 179 | def http_environ_session(): |
|
180 | 180 | """ |
|
181 | 181 | Allow to use "http_environ" in session scope. |
|
182 | 182 | """ |
|
183 | 183 | return http_environ( |
|
184 | 184 | http_host_stub=http_host_stub()) |
|
185 | 185 | |
|
186 | 186 | |
|
187 | 187 | @pytest.fixture |
|
188 | 188 | def http_host_stub(): |
|
189 | 189 | """ |
|
190 | 190 | Value of HTTP_HOST in the test run. |
|
191 | 191 | """ |
|
192 | 192 | return 'test.example.com:80' |
|
193 | 193 | |
|
194 | 194 | |
|
195 | 195 | @pytest.fixture |
|
196 | 196 | def http_environ(http_host_stub): |
|
197 | 197 | """ |
|
198 | 198 | HTTP extra environ keys. |
|
199 | 199 | |
|
200 | 200 | Used by the test application as well as for setting up the pylons |
|
201 | 201 | environment. In the case of the fixture "app" it should be possible |
|
202 | 202 | to override this for a specific test case. |
|
203 | 203 | """ |
|
204 | 204 | return { |
|
205 | 205 | 'SERVER_NAME': http_host_stub.split(':')[0], |
|
206 | 206 | 'SERVER_PORT': http_host_stub.split(':')[1], |
|
207 | 207 | 'HTTP_HOST': http_host_stub, |
|
208 | 208 | } |
|
209 | 209 | |
|
210 | 210 | |
|
211 | 211 | @pytest.fixture(scope='function') |
|
212 | 212 | def app(request, pylonsapp, http_environ): |
|
213 | 213 | app = TestApp( |
|
214 | 214 | pylonsapp, |
|
215 | 215 | extra_environ=http_environ) |
|
216 | 216 | if request.cls: |
|
217 | 217 | request.cls.app = app |
|
218 | 218 | return app |
|
219 | 219 | |
|
220 | 220 | |
|
221 | 221 | @pytest.fixture() |
|
222 | 222 | def app_settings(pylonsapp, pylons_config): |
|
223 | 223 | """ |
|
224 | 224 | Settings dictionary used to create the app. |
|
225 | 225 | |
|
226 | 226 | Parses the ini file and passes the result through the sanitize and apply |
|
227 | 227 | defaults mechanism in `rhodecode.config.middleware`. |
|
228 | 228 | """ |
|
229 | 229 | from paste.deploy.loadwsgi import loadcontext, APP |
|
230 | 230 | from rhodecode.config.middleware import ( |
|
231 | 231 | sanitize_settings_and_apply_defaults) |
|
232 | 232 | context = loadcontext(APP, 'config:' + pylons_config) |
|
233 | 233 | settings = sanitize_settings_and_apply_defaults(context.config()) |
|
234 | 234 | return settings |
|
235 | 235 | |
|
236 | 236 | |
|
237 | 237 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) |
|
238 | 238 | |
|
239 | 239 | |
|
240 | 240 | def _autologin_user(app, *args): |
|
241 | 241 | session = login_user_session(app, *args) |
|
242 | 242 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) |
|
243 | 243 | return LoginData(csrf_token, session['rhodecode_user']) |
|
244 | 244 | |
|
245 | 245 | |
|
246 | 246 | @pytest.fixture |
|
247 | 247 | def autologin_user(app): |
|
248 | 248 | """ |
|
249 | 249 | Utility fixture which makes sure that the admin user is logged in |
|
250 | 250 | """ |
|
251 | 251 | return _autologin_user(app) |
|
252 | 252 | |
|
253 | 253 | |
|
254 | 254 | @pytest.fixture |
|
255 | 255 | def autologin_regular_user(app): |
|
256 | 256 | """ |
|
257 | 257 | Utility fixture which makes sure that the regular user is logged in |
|
258 | 258 | """ |
|
259 | 259 | return _autologin_user( |
|
260 | 260 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
261 | 261 | |
|
262 | 262 | |
|
263 | 263 | @pytest.fixture(scope='function') |
|
264 | 264 | def csrf_token(request, autologin_user): |
|
265 | 265 | return autologin_user.csrf_token |
|
266 | 266 | |
|
267 | 267 | |
|
268 | 268 | @pytest.fixture(scope='function') |
|
269 | 269 | def xhr_header(request): |
|
270 | 270 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} |
|
271 | 271 | |
|
272 | 272 | |
|
273 | 273 | @pytest.fixture |
|
274 | 274 | def real_crypto_backend(monkeypatch): |
|
275 | 275 | """ |
|
276 | 276 | Switch the production crypto backend on for this test. |
|
277 | 277 | |
|
278 | 278 | During the test run the crypto backend is replaced with a faster |
|
279 | 279 | implementation based on the MD5 algorithm. |
|
280 | 280 | """ |
|
281 | 281 | monkeypatch.setattr(rhodecode, 'is_test', False) |
|
282 | 282 | |
|
283 | 283 | |
|
284 | 284 | @pytest.fixture(scope='class') |
|
285 | 285 | def index_location(request, pylonsapp): |
|
286 | 286 | index_location = pylonsapp.config['app_conf']['search.location'] |
|
287 | 287 | if request.cls: |
|
288 | 288 | request.cls.index_location = index_location |
|
289 | 289 | return index_location |
|
290 | 290 | |
|
291 | 291 | |
|
292 | 292 | @pytest.fixture(scope='session', autouse=True) |
|
293 | 293 | def tests_tmp_path(request): |
|
294 | 294 | """ |
|
295 | 295 | Create temporary directory to be used during the test session. |
|
296 | 296 | """ |
|
297 | 297 | if not os.path.exists(TESTS_TMP_PATH): |
|
298 | 298 | os.makedirs(TESTS_TMP_PATH) |
|
299 | 299 | |
|
300 | 300 | if not request.config.getoption('--keep-tmp-path'): |
|
301 | 301 | @request.addfinalizer |
|
302 | 302 | def remove_tmp_path(): |
|
303 | 303 | shutil.rmtree(TESTS_TMP_PATH) |
|
304 | 304 | |
|
305 | 305 | return TESTS_TMP_PATH |
|
306 | 306 | |
|
307 | 307 | |
|
308 | 308 | @pytest.fixture(scope='session', autouse=True) |
|
309 | 309 | def patch_pyro_request_scope_proxy_factory(request): |
|
310 | 310 | """ |
|
311 | 311 | Patch the pyro proxy factory to always use the same dummy request object |
|
312 | 312 | when under test. This will return the same pyro proxy on every call. |
|
313 | 313 | """ |
|
314 | 314 | dummy_request = pyramid.testing.DummyRequest() |
|
315 | 315 | |
|
316 | 316 | def mocked_call(self, request=None): |
|
317 | 317 | return self.getProxy(request=dummy_request) |
|
318 | 318 | |
|
319 | 319 | patcher = mock.patch( |
|
320 | 320 | 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__', |
|
321 | 321 | new=mocked_call) |
|
322 | 322 | patcher.start() |
|
323 | 323 | |
|
324 | 324 | @request.addfinalizer |
|
325 | 325 | def undo_patching(): |
|
326 | 326 | patcher.stop() |
|
327 | 327 | |
|
328 | 328 | |
|
329 | 329 | @pytest.fixture |
|
330 | 330 | def test_repo_group(request): |
|
331 | 331 | """ |
|
332 | 332 | Create a temporary repository group, and destroy it after |
|
333 | 333 | usage automatically |
|
334 | 334 | """ |
|
335 | 335 | fixture = Fixture() |
|
336 | 336 | repogroupid = 'test_repo_group_%s' % int(time.time()) |
|
337 | 337 | repo_group = fixture.create_repo_group(repogroupid) |
|
338 | 338 | |
|
339 | 339 | def _cleanup(): |
|
340 | 340 | fixture.destroy_repo_group(repogroupid) |
|
341 | 341 | |
|
342 | 342 | request.addfinalizer(_cleanup) |
|
343 | 343 | return repo_group |
|
344 | 344 | |
|
345 | 345 | |
|
346 | 346 | @pytest.fixture |
|
347 | 347 | def test_user_group(request): |
|
348 | 348 | """ |
|
349 | 349 | Create a temporary user group, and destroy it after |
|
350 | 350 | usage automatically |
|
351 | 351 | """ |
|
352 | 352 | fixture = Fixture() |
|
353 | 353 | usergroupid = 'test_user_group_%s' % int(time.time()) |
|
354 | 354 | user_group = fixture.create_user_group(usergroupid) |
|
355 | 355 | |
|
356 | 356 | def _cleanup(): |
|
357 | 357 | fixture.destroy_user_group(user_group) |
|
358 | 358 | |
|
359 | 359 | request.addfinalizer(_cleanup) |
|
360 | 360 | return user_group |
|
361 | 361 | |
|
362 | 362 | |
|
363 | 363 | @pytest.fixture(scope='session') |
|
364 | 364 | def test_repo(request): |
|
365 | 365 | container = TestRepoContainer() |
|
366 | 366 | request.addfinalizer(container._cleanup) |
|
367 | 367 | return container |
|
368 | 368 | |
|
369 | 369 | |
|
370 | 370 | class TestRepoContainer(object): |
|
371 | 371 | """ |
|
372 | 372 | Container for test repositories which are used read only. |
|
373 | 373 | |
|
374 | 374 | Repositories will be created on demand and re-used during the lifetime |
|
375 | 375 | of this object. |
|
376 | 376 | |
|
377 | 377 | Usage to get the svn test repository "minimal":: |
|
378 | 378 | |
|
379 | 379 | test_repo = TestContainer() |
|
380 | 380 | repo = test_repo('minimal', 'svn') |
|
381 | 381 | |
|
382 | 382 | """ |
|
383 | 383 | |
|
384 | 384 | dump_extractors = { |
|
385 | 385 | 'git': utils.extract_git_repo_from_dump, |
|
386 | 386 | 'hg': utils.extract_hg_repo_from_dump, |
|
387 | 387 | 'svn': utils.extract_svn_repo_from_dump, |
|
388 | 388 | } |
|
389 | 389 | |
|
390 | 390 | def __init__(self): |
|
391 | 391 | self._cleanup_repos = [] |
|
392 | 392 | self._fixture = Fixture() |
|
393 | 393 | self._repos = {} |
|
394 | 394 | |
|
395 | 395 | def __call__(self, dump_name, backend_alias): |
|
396 | 396 | key = (dump_name, backend_alias) |
|
397 | 397 | if key not in self._repos: |
|
398 | 398 | repo = self._create_repo(dump_name, backend_alias) |
|
399 | 399 | self._repos[key] = repo.repo_id |
|
400 | 400 | return Repository.get(self._repos[key]) |
|
401 | 401 | |
|
402 | 402 | def _create_repo(self, dump_name, backend_alias): |
|
403 | 403 | repo_name = '%s-%s' % (backend_alias, dump_name) |
|
404 | 404 | backend_class = get_backend(backend_alias) |
|
405 | 405 | dump_extractor = self.dump_extractors[backend_alias] |
|
406 | 406 | repo_path = dump_extractor(dump_name, repo_name) |
|
407 | 407 | vcs_repo = backend_class(repo_path) |
|
408 | 408 | repo2db_mapper({repo_name: vcs_repo}) |
|
409 | 409 | repo = RepoModel().get_by_repo_name(repo_name) |
|
410 | 410 | self._cleanup_repos.append(repo_name) |
|
411 | 411 | return repo |
|
412 | 412 | |
|
413 | 413 | def _cleanup(self): |
|
414 | 414 | for repo_name in reversed(self._cleanup_repos): |
|
415 | 415 | self._fixture.destroy_repo(repo_name) |
|
416 | 416 | |
|
417 | 417 | |
|
418 | 418 | @pytest.fixture |
|
419 | 419 | def backend(request, backend_alias, pylonsapp, test_repo): |
|
420 | 420 | """ |
|
421 | 421 | Parametrized fixture which represents a single backend implementation. |
|
422 | 422 | |
|
423 | 423 | It respects the option `--backends` to focus the test run on specific |
|
424 | 424 | backend implementations. |
|
425 | 425 | |
|
426 | 426 | It also supports `pytest.mark.xfail_backends` to mark tests as failing |
|
427 | 427 | for specific backends. This is intended as a utility for incremental |
|
428 | 428 | development of a new backend implementation. |
|
429 | 429 | """ |
|
430 | 430 | if backend_alias not in request.config.getoption('--backends'): |
|
431 | 431 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
432 | 432 | |
|
433 | 433 | utils.check_xfail_backends(request.node, backend_alias) |
|
434 | 434 | utils.check_skip_backends(request.node, backend_alias) |
|
435 | 435 | |
|
436 | 436 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
437 | 437 | backend = Backend( |
|
438 | 438 | alias=backend_alias, |
|
439 | 439 | repo_name=repo_name, |
|
440 | 440 | test_name=request.node.name, |
|
441 | 441 | test_repo_container=test_repo) |
|
442 | 442 | request.addfinalizer(backend.cleanup) |
|
443 | 443 | return backend |
|
444 | 444 | |
|
445 | 445 | |
|
446 | 446 | @pytest.fixture |
|
447 | 447 | def backend_git(request, pylonsapp, test_repo): |
|
448 | 448 | return backend(request, 'git', pylonsapp, test_repo) |
|
449 | 449 | |
|
450 | 450 | |
|
451 | 451 | @pytest.fixture |
|
452 | 452 | def backend_hg(request, pylonsapp, test_repo): |
|
453 | 453 | return backend(request, 'hg', pylonsapp, test_repo) |
|
454 | 454 | |
|
455 | 455 | |
|
456 | 456 | @pytest.fixture |
|
457 | 457 | def backend_svn(request, pylonsapp, test_repo): |
|
458 | 458 | return backend(request, 'svn', pylonsapp, test_repo) |
|
459 | 459 | |
|
460 | 460 | |
|
461 | 461 | @pytest.fixture |
|
462 | 462 | def backend_random(backend_git): |
|
463 | 463 | """ |
|
464 | 464 | Use this to express that your tests need "a backend". |
|
465 | 465 | |
|
466 | 466 | A few of our tests need a backend, so that we can run the code. This |
|
467 | 467 | fixture is intended to be used for such cases. It will pick one of the |
|
468 | 468 | backends and run the tests. |
|
469 | 469 | |
|
470 | 470 | The fixture `backend` would run the test multiple times for each |
|
471 | 471 | available backend which is a pure waste of time if the test is |
|
472 | 472 | independent of the backend type. |
|
473 | 473 | """ |
|
474 | 474 | # TODO: johbo: Change this to pick a random backend |
|
475 | 475 | return backend_git |
|
476 | 476 | |
|
477 | 477 | |
|
478 | 478 | @pytest.fixture |
|
479 | 479 | def backend_stub(backend_git): |
|
480 | 480 | """ |
|
481 | 481 | Use this to express that your tests need a backend stub |
|
482 | 482 | |
|
483 | 483 | TODO: mikhail: Implement a real stub logic instead of returning |
|
484 | 484 | a git backend |
|
485 | 485 | """ |
|
486 | 486 | return backend_git |
|
487 | 487 | |
|
488 | 488 | |
|
489 | 489 | @pytest.fixture |
|
490 | 490 | def repo_stub(backend_stub): |
|
491 | 491 | """ |
|
492 | 492 | Use this to express that your tests need a repository stub |
|
493 | 493 | """ |
|
494 | 494 | return backend_stub.create_repo() |
|
495 | 495 | |
|
496 | 496 | |
|
497 | 497 | class Backend(object): |
|
498 | 498 | """ |
|
499 | 499 | Represents the test configuration for one supported backend |
|
500 | 500 | |
|
501 | 501 | Provides easy access to different test repositories based on |
|
502 | 502 | `__getitem__`. Such repositories will only be created once per test |
|
503 | 503 | session. |
|
504 | 504 | """ |
|
505 | 505 | |
|
506 | 506 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
507 | 507 | _master_repo = None |
|
508 | 508 | _commit_ids = {} |
|
509 | 509 | |
|
510 | 510 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
511 | 511 | self.alias = alias |
|
512 | 512 | self.repo_name = repo_name |
|
513 | 513 | self._cleanup_repos = [] |
|
514 | 514 | self._test_name = test_name |
|
515 | 515 | self._test_repo_container = test_repo_container |
|
516 | 516 | # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or |
|
517 | 517 | # Fixture will survive in the end. |
|
518 | 518 | self._fixture = Fixture() |
|
519 | 519 | |
|
520 | 520 | def __getitem__(self, key): |
|
521 | 521 | return self._test_repo_container(key, self.alias) |
|
522 | 522 | |
|
523 | 523 | @property |
|
524 | 524 | def repo(self): |
|
525 | 525 | """ |
|
526 | 526 | Returns the "current" repository. This is the vcs_test repo or the |
|
527 | 527 | last repo which has been created with `create_repo`. |
|
528 | 528 | """ |
|
529 | 529 | from rhodecode.model.db import Repository |
|
530 | 530 | return Repository.get_by_repo_name(self.repo_name) |
|
531 | 531 | |
|
532 | 532 | @property |
|
533 | 533 | def default_branch_name(self): |
|
534 | 534 | VcsRepository = get_backend(self.alias) |
|
535 | 535 | return VcsRepository.DEFAULT_BRANCH_NAME |
|
536 | 536 | |
|
537 | 537 | @property |
|
538 | 538 | def default_head_id(self): |
|
539 | 539 | """ |
|
540 | 540 | Returns the default head id of the underlying backend. |
|
541 | 541 | |
|
542 | 542 | This will be the default branch name in case the backend does have a |
|
543 | 543 | default branch. In the other cases it will point to a valid head |
|
544 | 544 | which can serve as the base to create a new commit on top of it. |
|
545 | 545 | """ |
|
546 | 546 | vcsrepo = self.repo.scm_instance() |
|
547 | 547 | head_id = ( |
|
548 | 548 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
549 | 549 | vcsrepo.commit_ids[-1]) |
|
550 | 550 | return head_id |
|
551 | 551 | |
|
552 | 552 | @property |
|
553 | 553 | def commit_ids(self): |
|
554 | 554 | """ |
|
555 | 555 | Returns the list of commits for the last created repository |
|
556 | 556 | """ |
|
557 | 557 | return self._commit_ids |
|
558 | 558 | |
|
559 | 559 | def create_master_repo(self, commits): |
|
560 | 560 | """ |
|
561 | 561 | Create a repository and remember it as a template. |
|
562 | 562 | |
|
563 | 563 | This allows to easily create derived repositories to construct |
|
564 | 564 | more complex scenarios for diff, compare and pull requests. |
|
565 | 565 | |
|
566 | 566 | Returns a commit map which maps from commit message to raw_id. |
|
567 | 567 | """ |
|
568 | 568 | self._master_repo = self.create_repo(commits=commits) |
|
569 | 569 | return self._commit_ids |
|
570 | 570 | |
|
571 | 571 | def create_repo( |
|
572 | 572 | self, commits=None, number_of_commits=0, heads=None, |
|
573 | 573 | name_suffix=u'', **kwargs): |
|
574 | 574 | """ |
|
575 | 575 | Create a repository and record it for later cleanup. |
|
576 | 576 | |
|
577 | 577 | :param commits: Optional. A sequence of dict instances. |
|
578 | 578 | Will add a commit per entry to the new repository. |
|
579 | 579 | :param number_of_commits: Optional. If set to a number, this number of |
|
580 | 580 | commits will be added to the new repository. |
|
581 | 581 | :param heads: Optional. Can be set to a sequence of commit |
|
582 | 582 | names which shall be pulled in from the master repository. |
|
583 | 583 | |
|
584 | 584 | """ |
|
585 | 585 | self.repo_name = self._next_repo_name() + name_suffix |
|
586 | 586 | repo = self._fixture.create_repo( |
|
587 | 587 | self.repo_name, repo_type=self.alias, **kwargs) |
|
588 | 588 | self._cleanup_repos.append(repo.repo_name) |
|
589 | 589 | |
|
590 | 590 | commits = commits or [ |
|
591 | 591 | {'message': 'Commit %s of %s' % (x, self.repo_name)} |
|
592 | 592 | for x in xrange(number_of_commits)] |
|
593 | 593 | self._add_commits_to_repo(repo.scm_instance(), commits) |
|
594 | 594 | if heads: |
|
595 | 595 | self.pull_heads(repo, heads) |
|
596 | 596 | |
|
597 | 597 | return repo |
|
598 | 598 | |
|
599 | 599 | def pull_heads(self, repo, heads): |
|
600 | 600 | """ |
|
601 | 601 | Make sure that repo contains all commits mentioned in `heads` |
|
602 | 602 | """ |
|
603 | 603 | vcsmaster = self._master_repo.scm_instance() |
|
604 | 604 | vcsrepo = repo.scm_instance() |
|
605 | 605 | vcsrepo.config.clear_section('hooks') |
|
606 | 606 | commit_ids = [self._commit_ids[h] for h in heads] |
|
607 | 607 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) |
|
608 | 608 | |
|
609 | 609 | def create_fork(self): |
|
610 | 610 | repo_to_fork = self.repo_name |
|
611 | 611 | self.repo_name = self._next_repo_name() |
|
612 | 612 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) |
|
613 | 613 | self._cleanup_repos.append(self.repo_name) |
|
614 | 614 | return repo |
|
615 | 615 | |
|
616 | 616 | def new_repo_name(self, suffix=u''): |
|
617 | 617 | self.repo_name = self._next_repo_name() + suffix |
|
618 | 618 | self._cleanup_repos.append(self.repo_name) |
|
619 | 619 | return self.repo_name |
|
620 | 620 | |
|
621 | 621 | def _next_repo_name(self): |
|
622 | 622 | return u"%s_%s" % ( |
|
623 | 623 | self.invalid_repo_name.sub(u'_', self._test_name), |
|
624 | 624 | len(self._cleanup_repos)) |
|
625 | 625 | |
|
626 | 626 | def ensure_file(self, filename, content='Test content\n'): |
|
627 | 627 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
628 | 628 | commits = [ |
|
629 | 629 | {'added': [ |
|
630 | 630 | FileNode(filename, content=content), |
|
631 | 631 | ]}, |
|
632 | 632 | ] |
|
633 | 633 | self._add_commits_to_repo(self.repo.scm_instance(), commits) |
|
634 | 634 | |
|
635 | 635 | def enable_downloads(self): |
|
636 | 636 | repo = self.repo |
|
637 | 637 | repo.enable_downloads = True |
|
638 | 638 | Session().add(repo) |
|
639 | 639 | Session().commit() |
|
640 | 640 | |
|
641 | 641 | def cleanup(self): |
|
642 | 642 | for repo_name in reversed(self._cleanup_repos): |
|
643 | 643 | self._fixture.destroy_repo(repo_name) |
|
644 | 644 | |
|
645 | 645 | def _add_commits_to_repo(self, repo, commits): |
|
646 | 646 | commit_ids = _add_commits_to_repo(repo, commits) |
|
647 | 647 | if not commit_ids: |
|
648 | 648 | return |
|
649 | 649 | self._commit_ids = commit_ids |
|
650 | 650 | |
|
651 | 651 | # Creating refs for Git to allow fetching them from remote repository |
|
652 | 652 | if self.alias == 'git': |
|
653 | 653 | refs = {} |
|
654 | 654 | for message in self._commit_ids: |
|
655 | 655 | # TODO: mikhail: do more special chars replacements |
|
656 | 656 | ref_name = 'refs/test-refs/{}'.format( |
|
657 | 657 | message.replace(' ', '')) |
|
658 | 658 | refs[ref_name] = self._commit_ids[message] |
|
659 | 659 | self._create_refs(repo, refs) |
|
660 | 660 | |
|
661 | 661 | def _create_refs(self, repo, refs): |
|
662 | 662 | for ref_name in refs: |
|
663 | 663 | repo.set_refs(ref_name, refs[ref_name]) |
|
664 | 664 | |
|
665 | 665 | |
|
666 | 666 | @pytest.fixture |
|
667 | 667 | def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo): |
|
668 | 668 | """ |
|
669 | 669 | Parametrized fixture which represents a single vcs backend implementation. |
|
670 | 670 | |
|
671 | 671 | See the fixture `backend` for more details. This one implements the same |
|
672 | 672 | concept, but on vcs level. So it does not provide model instances etc. |
|
673 | 673 | |
|
674 | 674 | Parameters are generated dynamically, see :func:`pytest_generate_tests` |
|
675 | 675 | for how this works. |
|
676 | 676 | """ |
|
677 | 677 | if backend_alias not in request.config.getoption('--backends'): |
|
678 | 678 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
679 | 679 | |
|
680 | 680 | utils.check_xfail_backends(request.node, backend_alias) |
|
681 | 681 | utils.check_skip_backends(request.node, backend_alias) |
|
682 | 682 | |
|
683 | 683 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
684 | 684 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
685 | 685 | backend = VcsBackend( |
|
686 | 686 | alias=backend_alias, |
|
687 | 687 | repo_path=repo_path, |
|
688 | 688 | test_name=request.node.name, |
|
689 | 689 | test_repo_container=test_repo) |
|
690 | 690 | request.addfinalizer(backend.cleanup) |
|
691 | 691 | return backend |
|
692 | 692 | |
|
693 | 693 | |
|
694 | 694 | @pytest.fixture |
|
695 | 695 | def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo): |
|
696 | 696 | return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo) |
|
697 | 697 | |
|
698 | 698 | |
|
699 | 699 | @pytest.fixture |
|
700 | 700 | def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo): |
|
701 | 701 | return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo) |
|
702 | 702 | |
|
703 | 703 | |
|
704 | 704 | @pytest.fixture |
|
705 | 705 | def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo): |
|
706 | 706 | return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo) |
|
707 | 707 | |
|
708 | 708 | |
|
709 | 709 | @pytest.fixture |
|
710 | 710 | def vcsbackend_random(vcsbackend_git): |
|
711 | 711 | """ |
|
712 | 712 | Use this to express that your tests need "a vcsbackend". |
|
713 | 713 | |
|
714 | 714 | The fixture `vcsbackend` would run the test multiple times for each |
|
715 | 715 | available vcs backend which is a pure waste of time if the test is |
|
716 | 716 | independent of the vcs backend type. |
|
717 | 717 | """ |
|
718 | 718 | # TODO: johbo: Change this to pick a random backend |
|
719 | 719 | return vcsbackend_git |
|
720 | 720 | |
|
721 | 721 | |
|
722 | 722 | @pytest.fixture |
|
723 | 723 | def vcsbackend_stub(vcsbackend_git): |
|
724 | 724 | """ |
|
725 | 725 | Use this to express that your test just needs a stub of a vcsbackend. |
|
726 | 726 | |
|
727 | 727 | Plan is to eventually implement an in-memory stub to speed tests up. |
|
728 | 728 | """ |
|
729 | 729 | return vcsbackend_git |
|
730 | 730 | |
|
731 | 731 | |
|
732 | 732 | class VcsBackend(object): |
|
733 | 733 | """ |
|
734 | 734 | Represents the test configuration for one supported vcs backend. |
|
735 | 735 | """ |
|
736 | 736 | |
|
737 | 737 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
738 | 738 | |
|
739 | 739 | def __init__(self, alias, repo_path, test_name, test_repo_container): |
|
740 | 740 | self.alias = alias |
|
741 | 741 | self._repo_path = repo_path |
|
742 | 742 | self._cleanup_repos = [] |
|
743 | 743 | self._test_name = test_name |
|
744 | 744 | self._test_repo_container = test_repo_container |
|
745 | 745 | |
|
746 | 746 | def __getitem__(self, key): |
|
747 | 747 | return self._test_repo_container(key, self.alias).scm_instance() |
|
748 | 748 | |
|
749 | 749 | @property |
|
750 | 750 | def repo(self): |
|
751 | 751 | """ |
|
752 | 752 | Returns the "current" repository. This is the vcs_test repo or the last |
|
753 | 753 | repo which has been created. |
|
754 | 754 | """ |
|
755 | 755 | Repository = get_backend(self.alias) |
|
756 | 756 | return Repository(self._repo_path) |
|
757 | 757 | |
|
758 | 758 | @property |
|
759 | 759 | def backend(self): |
|
760 | 760 | """ |
|
761 | 761 | Returns the backend implementation class. |
|
762 | 762 | """ |
|
763 | 763 | return get_backend(self.alias) |
|
764 | 764 | |
|
765 | 765 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None): |
|
766 | 766 | repo_name = self._next_repo_name() |
|
767 | 767 | self._repo_path = get_new_dir(repo_name) |
|
768 | 768 | repo_class = get_backend(self.alias) |
|
769 | 769 | src_url = None |
|
770 | 770 | if _clone_repo: |
|
771 | 771 | src_url = _clone_repo.path |
|
772 | 772 | repo = repo_class(self._repo_path, create=True, src_url=src_url) |
|
773 | 773 | self._cleanup_repos.append(repo) |
|
774 | 774 | |
|
775 | 775 | commits = commits or [ |
|
776 | 776 | {'message': 'Commit %s of %s' % (x, repo_name)} |
|
777 | 777 | for x in xrange(number_of_commits)] |
|
778 | 778 | _add_commits_to_repo(repo, commits) |
|
779 | 779 | return repo |
|
780 | 780 | |
|
781 | 781 | def clone_repo(self, repo): |
|
782 | 782 | return self.create_repo(_clone_repo=repo) |
|
783 | 783 | |
|
784 | 784 | def cleanup(self): |
|
785 | 785 | for repo in self._cleanup_repos: |
|
786 | 786 | shutil.rmtree(repo.path) |
|
787 | 787 | |
|
788 | 788 | def new_repo_path(self): |
|
789 | 789 | repo_name = self._next_repo_name() |
|
790 | 790 | self._repo_path = get_new_dir(repo_name) |
|
791 | 791 | return self._repo_path |
|
792 | 792 | |
|
793 | 793 | def _next_repo_name(self): |
|
794 | 794 | return "%s_%s" % ( |
|
795 | 795 | self.invalid_repo_name.sub('_', self._test_name), |
|
796 | 796 | len(self._cleanup_repos)) |
|
797 | 797 | |
|
798 | 798 | def add_file(self, repo, filename, content='Test content\n'): |
|
799 | 799 | imc = repo.in_memory_commit |
|
800 | 800 | imc.add(FileNode(filename, content=content)) |
|
801 | 801 | imc.commit( |
|
802 | 802 | message=u'Automatic commit from vcsbackend fixture', |
|
803 | 803 | author=u'Automatic') |
|
804 | 804 | |
|
805 | 805 | def ensure_file(self, filename, content='Test content\n'): |
|
806 | 806 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
807 | 807 | self.add_file(self.repo, filename, content) |
|
808 | 808 | |
|
809 | 809 | |
|
810 | 810 | def _add_commits_to_repo(vcs_repo, commits): |
|
811 | 811 | commit_ids = {} |
|
812 | 812 | if not commits: |
|
813 | 813 | return commit_ids |
|
814 | 814 | |
|
815 | 815 | imc = vcs_repo.in_memory_commit |
|
816 | 816 | commit = None |
|
817 | 817 | |
|
818 | 818 | for idx, commit in enumerate(commits): |
|
819 | 819 | message = unicode(commit.get('message', 'Commit %s' % idx)) |
|
820 | 820 | |
|
821 | 821 | for node in commit.get('added', []): |
|
822 | 822 | imc.add(FileNode(node.path, content=node.content)) |
|
823 | 823 | for node in commit.get('changed', []): |
|
824 | 824 | imc.change(FileNode(node.path, content=node.content)) |
|
825 | 825 | for node in commit.get('removed', []): |
|
826 | 826 | imc.remove(FileNode(node.path)) |
|
827 | 827 | |
|
828 | 828 | parents = [ |
|
829 | 829 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
|
830 | 830 | for p in commit.get('parents', [])] |
|
831 | 831 | |
|
832 | 832 | operations = ('added', 'changed', 'removed') |
|
833 | 833 | if not any((commit.get(o) for o in operations)): |
|
834 | 834 | imc.add(FileNode('file_%s' % idx, content=message)) |
|
835 | 835 | |
|
836 | 836 | commit = imc.commit( |
|
837 | 837 | message=message, |
|
838 | 838 | author=unicode(commit.get('author', 'Automatic')), |
|
839 | 839 | date=commit.get('date'), |
|
840 | 840 | branch=commit.get('branch'), |
|
841 | 841 | parents=parents) |
|
842 | 842 | |
|
843 | 843 | commit_ids[commit.message] = commit.raw_id |
|
844 | 844 | |
|
845 | 845 | return commit_ids |
|
846 | 846 | |
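For reference, a hedged sketch (editorial addition) of the commit-description dicts consumed by `_add_commits_to_repo`, and therefore by `VcsBackend.create_repo(commits=...)`. The keys are taken from the loop above; the file names and contents are made up.

example_commits = [
    {'message': 'add readme',
     'added': [FileNode('README.rst', content='hello\n')]},
    {'message': 'update readme',
     'changed': [FileNode('README.rst', content='hello world\n')],
     # parents refer to earlier entries by their message
     'parents': ['add readme']},
    {'message': 'drop readme',
     'removed': [FileNode('README.rst')]},
]
# commit_ids = _add_commits_to_repo(vcs_repo, example_commits)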
|
847 | 847 | |
|
848 | 848 | @pytest.fixture |
|
849 | 849 | def reposerver(request): |
|
850 | 850 | """ |
|
851 | 851 | Allows serving a backend repository |
|
852 | 852 | """ |
|
853 | 853 | |
|
854 | 854 | repo_server = RepoServer() |
|
855 | 855 | request.addfinalizer(repo_server.cleanup) |
|
856 | 856 | return repo_server |
|
857 | 857 | |
|
858 | 858 | |
|
859 | 859 | class RepoServer(object): |
|
860 | 860 | """ |
|
861 | 861 | Utility to serve a local repository for the duration of a test case. |
|
862 | 862 | |
|
863 | 863 | Supports only Subversion so far. |
|
864 | 864 | """ |
|
865 | 865 | |
|
866 | 866 | url = None |
|
867 | 867 | |
|
868 | 868 | def __init__(self): |
|
869 | 869 | self._cleanup_servers = [] |
|
870 | 870 | |
|
871 | 871 | def serve(self, vcsrepo): |
|
872 | 872 | if vcsrepo.alias != 'svn': |
|
873 | 873 | raise TypeError("Backend %s not supported" % vcsrepo.alias) |
|
874 | 874 | |
|
875 | 875 | proc = subprocess.Popen( |
|
876 | 876 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', |
|
877 | 877 | '--root', vcsrepo.path]) |
|
878 | 878 | self._cleanup_servers.append(proc) |
|
879 | 879 | self.url = 'svn://localhost' |
|
880 | 880 | |
|
881 | 881 | def cleanup(self): |
|
882 | 882 | for proc in self._cleanup_servers: |
|
883 | 883 | proc.terminate() |
|
884 | 884 | |
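An illustrative sketch (not part of the original module) combining the two fixtures above; only Subversion is supported, as the docstring notes, and the test name is hypothetical.

def test_serves_created_svn_repo(vcsbackend_svn, reposerver):
    repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(repo)
    # svnserve is started against the repository root and terminated
    # again by the fixture finalizer
    assert reposerver.url == 'svn://localhost'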
|
885 | 885 | |
|
886 | 886 | @pytest.fixture |
|
887 | 887 | def pr_util(backend, request): |
|
888 | 888 | """ |
|
889 | 889 | Utility for tests of models and for functional tests around pull requests. |
|
890 | 890 | |
|
891 | 891 | It gives an instance of :class:`PRTestUtility` which provides various |
|
892 | 892 | utility methods around one pull request. |
|
893 | 893 | |
|
894 | 894 | This fixture uses `backend` and inherits its parameterization. |
|
895 | 895 | """ |
|
896 | 896 | |
|
897 | 897 | util = PRTestUtility(backend) |
|
898 | 898 | |
|
899 | 899 | @request.addfinalizer |
|
900 | 900 | def cleanup(): |
|
901 | 901 | util.cleanup() |
|
902 | 902 | |
|
903 | 903 | return util |
|
904 | 904 | |
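A short usage sketch (editorial, illustrative) that relies only on the `PRTestUtility` methods defined below: create a pull request with the default commits, push one more commit to the source repository and update the pull request.

def test_pull_request_grows_by_one_commit(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    new_commit_id = pr_util.add_one_commit()
    assert new_commit_id in pull_request.revisions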
|
905 | 905 | |
|
906 | 906 | class PRTestUtility(object): |
|
907 | 907 | |
|
908 | 908 | pull_request = None |
|
909 | 909 | pull_request_id = None |
|
910 | 910 | mergeable_patcher = None |
|
911 | 911 | mergeable_mock = None |
|
912 | 912 | notification_patcher = None |
|
913 | 913 | |
|
914 | 914 | def __init__(self, backend): |
|
915 | 915 | self.backend = backend |
|
916 | 916 | |
|
917 | 917 | def create_pull_request( |
|
918 | 918 | self, commits=None, target_head=None, source_head=None, |
|
919 | 919 | revisions=None, approved=False, author=None, mergeable=False, |
|
920 | 920 | enable_notifications=True, name_suffix=u'', reviewers=None, |
|
921 | 921 | title=u"Test", description=u"Description"): |
|
922 | 922 | self.set_mergeable(mergeable) |
|
923 | 923 | if not enable_notifications: |
|
924 | 924 | # mock notification side effect |
|
925 | 925 | self.notification_patcher = mock.patch( |
|
926 | 926 | 'rhodecode.model.notification.NotificationModel.create') |
|
927 | 927 | self.notification_patcher.start() |
|
928 | 928 | |
|
929 | 929 | if not self.pull_request: |
|
930 | 930 | if not commits: |
|
931 | 931 | commits = [ |
|
932 | 932 | {'message': 'c1'}, |
|
933 | 933 | {'message': 'c2'}, |
|
934 | 934 | {'message': 'c3'}, |
|
935 | 935 | ] |
|
936 | 936 | target_head = 'c1' |
|
937 | 937 | source_head = 'c2' |
|
938 | 938 | revisions = ['c2'] |
|
939 | 939 | |
|
940 | 940 | self.commit_ids = self.backend.create_master_repo(commits) |
|
941 | 941 | self.target_repository = self.backend.create_repo( |
|
942 | 942 | heads=[target_head], name_suffix=name_suffix) |
|
943 | 943 | self.source_repository = self.backend.create_repo( |
|
944 | 944 | heads=[source_head], name_suffix=name_suffix) |
|
945 | 945 | self.author = author or UserModel().get_by_username( |
|
946 | 946 | TEST_USER_ADMIN_LOGIN) |
|
947 | 947 | |
|
948 | 948 | model = PullRequestModel() |
|
949 | 949 | self.create_parameters = { |
|
950 | 950 | 'created_by': self.author, |
|
951 | 951 | 'source_repo': self.source_repository.repo_name, |
|
952 | 952 | 'source_ref': self._default_branch_reference(source_head), |
|
953 | 953 | 'target_repo': self.target_repository.repo_name, |
|
954 | 954 | 'target_ref': self._default_branch_reference(target_head), |
|
955 | 955 | 'revisions': [self.commit_ids[r] for r in revisions], |
|
956 | 956 | 'reviewers': reviewers or self._get_reviewers(), |
|
957 | 957 | 'title': title, |
|
958 | 958 | 'description': description, |
|
959 | 959 | } |
|
960 | 960 | self.pull_request = model.create(**self.create_parameters) |
|
961 | 961 | assert model.get_versions(self.pull_request) == [] |
|
962 | 962 | |
|
963 | 963 | self.pull_request_id = self.pull_request.pull_request_id |
|
964 | 964 | |
|
965 | 965 | if approved: |
|
966 | 966 | self.approve() |
|
967 | 967 | |
|
968 | 968 | Session().add(self.pull_request) |
|
969 | 969 | Session().commit() |
|
970 | 970 | |
|
971 | 971 | return self.pull_request |
|
972 | 972 | |
|
973 | 973 | def approve(self): |
|
974 | 974 | self.create_status_votes( |
|
975 | 975 | ChangesetStatus.STATUS_APPROVED, |
|
976 | 976 | *self.pull_request.reviewers) |
|
977 | 977 | |
|
978 | 978 | def close(self): |
|
979 | 979 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
980 | 980 | |
|
981 | 981 | def _default_branch_reference(self, commit_message): |
|
982 | 982 | reference = '%s:%s:%s' % ( |
|
983 | 983 | 'branch', |
|
984 | 984 | self.backend.default_branch_name, |
|
985 | 985 | self.commit_ids[commit_message]) |
|
986 | 986 | return reference |
|
987 | 987 | |
|
988 | 988 | def _get_reviewers(self): |
|
989 | 989 | model = UserModel() |
|
990 | 990 | return [ |
|
991 | 991 | model.get_by_username(TEST_USER_REGULAR_LOGIN), |
|
992 | 992 | model.get_by_username(TEST_USER_REGULAR2_LOGIN), |
|
993 | 993 | ] |
|
994 | 994 | |
|
995 | 995 | def update_source_repository(self, head=None): |
|
996 | 996 | heads = [head or 'c3'] |
|
997 | 997 | self.backend.pull_heads(self.source_repository, heads=heads) |
|
998 | 998 | |
|
999 | 999 | def add_one_commit(self, head=None): |
|
1000 | 1000 | self.update_source_repository(head=head) |
|
1001 | 1001 | old_commit_ids = set(self.pull_request.revisions) |
|
1002 | 1002 | PullRequestModel().update_commits(self.pull_request) |
|
1003 | 1003 | commit_ids = set(self.pull_request.revisions) |
|
1004 | 1004 | new_commit_ids = commit_ids - old_commit_ids |
|
1005 | 1005 | assert len(new_commit_ids) == 1 |
|
1006 | 1006 | return new_commit_ids.pop() |
|
1007 | 1007 | |
|
1008 | 1008 | def remove_one_commit(self): |
|
1009 | 1009 | assert len(self.pull_request.revisions) == 2 |
|
1010 | 1010 | source_vcs = self.source_repository.scm_instance() |
|
1011 | 1011 | removed_commit_id = source_vcs.commit_ids[-1] |
|
1012 | 1012 | |
|
1013 | 1013 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1014 | 1014 | # remove the if once that's sorted out. |
|
1015 | 1015 | if self.backend.alias == "git": |
|
1016 | 1016 | kwargs = {'branch_name': self.backend.default_branch_name} |
|
1017 | 1017 | else: |
|
1018 | 1018 | kwargs = {} |
|
1019 | 1019 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1020 | 1020 | |
|
1021 | 1021 | PullRequestModel().update_commits(self.pull_request) |
|
1022 | 1022 | assert len(self.pull_request.revisions) == 1 |
|
1023 | 1023 | return removed_commit_id |
|
1024 | 1024 | |
|
1025 | 1025 | def create_comment(self, linked_to=None): |
|
1026 | 1026 | comment = ChangesetCommentsModel().create( |
|
1027 | 1027 | text=u"Test comment", |
|
1028 | 1028 | repo=self.target_repository.repo_name, |
|
1029 | 1029 | user=self.author, |
|
1030 | 1030 | pull_request=self.pull_request) |
|
1031 | 1031 | assert comment.pull_request_version_id is None |
|
1032 | 1032 | |
|
1033 | 1033 | if linked_to: |
|
1034 | 1034 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1035 | 1035 | |
|
1036 | 1036 | return comment |
|
1037 | 1037 | |
|
1038 | 1038 | def create_inline_comment( |
|
1039 | 1039 | self, linked_to=None, line_no=u'n1', file_path='file_1'): |
|
1040 | 1040 | comment = ChangesetCommentsModel().create( |
|
1041 | 1041 | text=u"Test comment", |
|
1042 | 1042 | repo=self.target_repository.repo_name, |
|
1043 | 1043 | user=self.author, |
|
1044 | 1044 | line_no=line_no, |
|
1045 | 1045 | f_path=file_path, |
|
1046 | 1046 | pull_request=self.pull_request) |
|
1047 | 1047 | assert comment.pull_request_version_id is None |
|
1048 | 1048 | |
|
1049 | 1049 | if linked_to: |
|
1050 | 1050 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1051 | 1051 | |
|
1052 | 1052 | return comment |
|
1053 | 1053 | |
|
1054 | 1054 | def create_version_of_pull_request(self): |
|
1055 | 1055 | pull_request = self.create_pull_request() |
|
1056 | 1056 | version = PullRequestModel()._create_version_from_snapshot( |
|
1057 | 1057 | pull_request) |
|
1058 | 1058 | return version |
|
1059 | 1059 | |
|
1060 | 1060 | def create_status_votes(self, status, *reviewers): |
|
1061 | 1061 | for reviewer in reviewers: |
|
1062 | 1062 | ChangesetStatusModel().set_status( |
|
1063 | 1063 | repo=self.pull_request.target_repo, |
|
1064 | 1064 | status=status, |
|
1065 | 1065 | user=reviewer.user_id, |
|
1066 | 1066 | pull_request=self.pull_request) |
|
1067 | 1067 | |
|
1068 | 1068 | def set_mergeable(self, value): |
|
1069 | 1069 | if not self.mergeable_patcher: |
|
1070 | 1070 | self.mergeable_patcher = mock.patch.object( |
|
1071 | 1071 | VcsSettingsModel, 'get_general_settings') |
|
1072 | 1072 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1073 | 1073 | self.mergeable_mock.return_value = { |
|
1074 | 1074 | 'rhodecode_pr_merge_enabled': value} |
|
1075 | 1075 | |
|
1076 | 1076 | def cleanup(self): |
|
1077 | 1077 | # In case the source repository is already cleaned up, the pull |
|
1078 | 1078 | # request will already be deleted. |
|
1079 | 1079 | pull_request = PullRequest().get(self.pull_request_id) |
|
1080 | 1080 | if pull_request: |
|
1081 | 1081 | PullRequestModel().delete(pull_request) |
|
1082 | 1082 | Session().commit() |
|
1083 | 1083 | |
|
1084 | 1084 | if self.notification_patcher: |
|
1085 | 1085 | self.notification_patcher.stop() |
|
1086 | 1086 | |
|
1087 | 1087 | if self.mergeable_patcher: |
|
1088 | 1088 | self.mergeable_patcher.stop() |
|
1089 | 1089 | |
|
1090 | 1090 | |
|
1091 | 1091 | @pytest.fixture |
|
1092 | 1092 | def user_admin(pylonsapp): |
|
1093 | 1093 | """ |
|
1094 | 1094 | Provides the default admin test user as an instance of `db.User`. |
|
1095 | 1095 | """ |
|
1096 | 1096 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1097 | 1097 | return user |
|
1098 | 1098 | |
|
1099 | 1099 | |
|
1100 | 1100 | @pytest.fixture |
|
1101 | 1101 | def user_regular(pylonsapp): |
|
1102 | 1102 | """ |
|
1103 | 1103 | Provides the default regular test user as an instance of `db.User`. |
|
1104 | 1104 | """ |
|
1105 | 1105 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
1106 | 1106 | return user |
|
1107 | 1107 | |
|
1108 | 1108 | |
|
1109 | 1109 | @pytest.fixture |
|
1110 | 1110 | def user_util(request, pylonsapp): |
|
1111 | 1111 | """ |
|
1112 | 1112 | Provides a wired instance of `UserUtility` with integrated cleanup. |
|
1113 | 1113 | """ |
|
1114 | 1114 | utility = UserUtility(test_name=request.node.name) |
|
1115 | 1115 | request.addfinalizer(utility.cleanup) |
|
1116 | 1116 | return utility |
|
1117 | 1117 | |
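An illustrative sketch (not part of the diff); the permission name is an example of RhodeCode's repo-group permission identifiers, and everything created here is removed by the utility's integrated cleanup.

def test_user_can_read_repo_group(user_util):
    user = user_util.create_user()
    repo_group = user_util.create_repo_group()
    user_util.grant_user_permission_to_repo_group(
        repo_group, user, 'group.read')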
|
1118 | 1118 | |
|
1119 | 1119 | # TODO: johbo: Split this up into utilities per domain or something similar |
|
1120 | 1120 | class UserUtility(object): |
|
1121 | 1121 | |
|
1122 | 1122 | def __init__(self, test_name="test"): |
|
1123 | 1123 | self._test_name = test_name |
|
1124 | 1124 | self.fixture = Fixture() |
|
1125 | 1125 | self.repo_group_ids = [] |
|
1126 | 1126 | self.user_ids = [] |
|
1127 | 1127 | self.user_group_ids = [] |
|
1128 | 1128 | self.user_repo_permission_ids = [] |
|
1129 | 1129 | self.user_group_repo_permission_ids = [] |
|
1130 | 1130 | self.user_repo_group_permission_ids = [] |
|
1131 | 1131 | self.user_group_repo_group_permission_ids = [] |
|
1132 | 1132 | self.user_user_group_permission_ids = [] |
|
1133 | 1133 | self.user_group_user_group_permission_ids = [] |
|
1134 | 1134 | self.user_permissions = [] |
|
1135 | 1135 | |
|
1136 | 1136 | def create_repo_group( |
|
1137 | 1137 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): |
|
1138 | 1138 | group_name = "{prefix}_repogroup_{count}".format( |
|
1139 | 1139 | prefix=self._test_name, |
|
1140 | 1140 | count=len(self.repo_group_ids)) |
|
1141 | 1141 | repo_group = self.fixture.create_repo_group( |
|
1142 | 1142 | group_name, cur_user=owner) |
|
1143 | 1143 | if auto_cleanup: |
|
1144 | 1144 | self.repo_group_ids.append(repo_group.group_id) |
|
1145 | 1145 | return repo_group |
|
1146 | 1146 | |
|
1147 | 1147 | def create_user(self, auto_cleanup=True, **kwargs): |
|
1148 | 1148 | user_name = "{prefix}_user_{count}".format( |
|
1149 | 1149 | prefix=self._test_name, |
|
1150 | 1150 | count=len(self.user_ids)) |
|
1151 | 1151 | user = self.fixture.create_user(user_name, **kwargs) |
|
1152 | 1152 | if auto_cleanup: |
|
1153 | 1153 | self.user_ids.append(user.user_id) |
|
1154 | 1154 | return user |
|
1155 | 1155 | |
|
1156 | 1156 | def create_user_with_group(self): |
|
1157 | 1157 | user = self.create_user() |
|
1158 | 1158 | user_group = self.create_user_group(members=[user]) |
|
1159 | 1159 | return user, user_group |
|
1160 | 1160 | |
|
1161 | 1161 | def create_user_group(self, members=None, auto_cleanup=True, **kwargs): |
|
1162 | 1162 | group_name = "{prefix}_usergroup_{count}".format( |
|
1163 | 1163 | prefix=self._test_name, |
|
1164 | 1164 | count=len(self.user_group_ids)) |
|
1165 | 1165 | user_group = self.fixture.create_user_group(group_name, **kwargs) |
|
1166 | 1166 | if auto_cleanup: |
|
1167 | 1167 | self.user_group_ids.append(user_group.users_group_id) |
|
1168 | 1168 | if members: |
|
1169 | 1169 | for user in members: |
|
1170 | 1170 | UserGroupModel().add_user_to_group(user_group, user) |
|
1171 | 1171 | return user_group |
|
1172 | 1172 | |
|
1173 | 1173 | def grant_user_permission(self, user_name, permission_name): |
|
1174 | 1174 | self._inherit_default_user_permissions(user_name, False) |
|
1175 | 1175 | self.user_permissions.append((user_name, permission_name)) |
|
1176 | 1176 | |
|
1177 | 1177 | def grant_user_permission_to_repo_group( |
|
1178 | 1178 | self, repo_group, user, permission_name): |
|
1179 | 1179 | permission = RepoGroupModel().grant_user_permission( |
|
1180 | 1180 | repo_group, user, permission_name) |
|
1181 | 1181 | self.user_repo_group_permission_ids.append( |
|
1182 | 1182 | (repo_group.group_id, user.user_id)) |
|
1183 | 1183 | return permission |
|
1184 | 1184 | |
|
1185 | 1185 | def grant_user_group_permission_to_repo_group( |
|
1186 | 1186 | self, repo_group, user_group, permission_name): |
|
1187 | 1187 | permission = RepoGroupModel().grant_user_group_permission( |
|
1188 | 1188 | repo_group, user_group, permission_name) |
|
1189 | 1189 | self.user_group_repo_group_permission_ids.append( |
|
1190 | 1190 | (repo_group.group_id, user_group.users_group_id)) |
|
1191 | 1191 | return permission |
|
1192 | 1192 | |
|
1193 | 1193 | def grant_user_permission_to_repo( |
|
1194 | 1194 | self, repo, user, permission_name): |
|
1195 | 1195 | permission = RepoModel().grant_user_permission( |
|
1196 | 1196 | repo, user, permission_name) |
|
1197 | 1197 | self.user_repo_permission_ids.append( |
|
1198 | 1198 | (repo.repo_id, user.user_id)) |
|
1199 | 1199 | return permission |
|
1200 | 1200 | |
|
1201 | 1201 | def grant_user_group_permission_to_repo( |
|
1202 | 1202 | self, repo, user_group, permission_name): |
|
1203 | 1203 | permission = RepoModel().grant_user_group_permission( |
|
1204 | 1204 | repo, user_group, permission_name) |
|
1205 | 1205 | self.user_group_repo_permission_ids.append( |
|
1206 | 1206 | (repo.repo_id, user_group.users_group_id)) |
|
1207 | 1207 | return permission |
|
1208 | 1208 | |
|
1209 | 1209 | def grant_user_permission_to_user_group( |
|
1210 | 1210 | self, target_user_group, user, permission_name): |
|
1211 | 1211 | permission = UserGroupModel().grant_user_permission( |
|
1212 | 1212 | target_user_group, user, permission_name) |
|
1213 | 1213 | self.user_user_group_permission_ids.append( |
|
1214 | 1214 | (target_user_group.users_group_id, user.user_id)) |
|
1215 | 1215 | return permission |
|
1216 | 1216 | |
|
1217 | 1217 | def grant_user_group_permission_to_user_group( |
|
1218 | 1218 | self, target_user_group, user_group, permission_name): |
|
1219 | 1219 | permission = UserGroupModel().grant_user_group_permission( |
|
1220 | 1220 | target_user_group, user_group, permission_name) |
|
1221 | 1221 | self.user_group_user_group_permission_ids.append( |
|
1222 | 1222 | (target_user_group.users_group_id, user_group.users_group_id)) |
|
1223 | 1223 | return permission |
|
1224 | 1224 | |
|
1225 | 1225 | def revoke_user_permission(self, user_name, permission_name): |
|
1226 | 1226 | self._inherit_default_user_permissions(user_name, True) |
|
1227 | 1227 | UserModel().revoke_perm(user_name, permission_name) |
|
1228 | 1228 | |
|
1229 | 1229 | def _inherit_default_user_permissions(self, user_name, value): |
|
1230 | 1230 | user = UserModel().get_by_username(user_name) |
|
1231 | 1231 | user.inherit_default_permissions = value |
|
1232 | 1232 | Session().add(user) |
|
1233 | 1233 | Session().commit() |
|
1234 | 1234 | |
|
1235 | 1235 | def cleanup(self): |
|
1236 | 1236 | self._cleanup_permissions() |
|
1237 | 1237 | self._cleanup_repo_groups() |
|
1238 | 1238 | self._cleanup_user_groups() |
|
1239 | 1239 | self._cleanup_users() |
|
1240 | 1240 | |
|
1241 | 1241 | def _cleanup_permissions(self): |
|
1242 | 1242 | if self.user_permissions: |
|
1243 | 1243 | for user_name, permission_name in self.user_permissions: |
|
1244 | 1244 | self.revoke_user_permission(user_name, permission_name) |
|
1245 | 1245 | |
|
1246 | 1246 | for permission in self.user_repo_permission_ids: |
|
1247 | 1247 | RepoModel().revoke_user_permission(*permission) |
|
1248 | 1248 | |
|
1249 | 1249 | for permission in self.user_group_repo_permission_ids: |
|
1250 | 1250 | RepoModel().revoke_user_group_permission(*permission) |
|
1251 | 1251 | |
|
1252 | 1252 | for permission in self.user_repo_group_permission_ids: |
|
1253 | 1253 | RepoGroupModel().revoke_user_permission(*permission) |
|
1254 | 1254 | |
|
1255 | 1255 | for permission in self.user_group_repo_group_permission_ids: |
|
1256 | 1256 | RepoGroupModel().revoke_user_group_permission(*permission) |
|
1257 | 1257 | |
|
1258 | 1258 | for permission in self.user_user_group_permission_ids: |
|
1259 | 1259 | UserGroupModel().revoke_user_permission(*permission) |
|
1260 | 1260 | |
|
1261 | 1261 | for permission in self.user_group_user_group_permission_ids: |
|
1262 | 1262 | UserGroupModel().revoke_user_group_permission(*permission) |
|
1263 | 1263 | |
|
1264 | 1264 | def _cleanup_repo_groups(self): |
|
1265 | 1265 | def _repo_group_compare(first_group_id, second_group_id): |
|
1266 | 1266 | """ |
|
1267 | 1267 | Gives higher priority to the groups with the most complex paths |
|
1268 | 1268 | """ |
|
1269 | 1269 | first_group = RepoGroup.get(first_group_id) |
|
1270 | 1270 | second_group = RepoGroup.get(second_group_id) |
|
1271 | 1271 | first_group_parts = ( |
|
1272 | 1272 | len(first_group.group_name.split('/')) if first_group else 0) |
|
1273 | 1273 | second_group_parts = ( |
|
1274 | 1274 | len(second_group.group_name.split('/')) if second_group else 0) |
|
1275 | 1275 | return cmp(second_group_parts, first_group_parts) |
|
1276 | 1276 | |
|
1277 | 1277 | sorted_repo_group_ids = sorted( |
|
1278 | 1278 | self.repo_group_ids, cmp=_repo_group_compare) |
|
1279 | 1279 | for repo_group_id in sorted_repo_group_ids: |
|
1280 | 1280 | self.fixture.destroy_repo_group(repo_group_id) |
|
1281 | 1281 | |
|
1282 | 1282 | def _cleanup_user_groups(self): |
|
1283 | 1283 | def _user_group_compare(first_group_id, second_group_id): |
|
1284 | 1284 | """ |
|
1285 | 1285 | Gives higher priority to the groups with the most complex paths |
|
1286 | 1286 | """ |
|
1287 | 1287 | first_group = UserGroup.get(first_group_id) |
|
1288 | 1288 | second_group = UserGroup.get(second_group_id) |
|
1289 | 1289 | first_group_parts = ( |
|
1290 | 1290 | len(first_group.users_group_name.split('/')) |
|
1291 | 1291 | if first_group else 0) |
|
1292 | 1292 | second_group_parts = ( |
|
1293 | 1293 | len(second_group.users_group_name.split('/')) |
|
1294 | 1294 | if second_group else 0) |
|
1295 | 1295 | return cmp(second_group_parts, first_group_parts) |
|
1296 | 1296 | |
|
1297 | 1297 | sorted_user_group_ids = sorted( |
|
1298 | 1298 | self.user_group_ids, cmp=_user_group_compare) |
|
1299 | 1299 | for user_group_id in sorted_user_group_ids: |
|
1300 | 1300 | self.fixture.destroy_user_group(user_group_id) |
|
1301 | 1301 | |
|
1302 | 1302 | def _cleanup_users(self): |
|
1303 | 1303 | for user_id in self.user_ids: |
|
1304 | 1304 | self.fixture.destroy_user(user_id) |
|
1305 | 1305 | |
|
1306 | 1306 | |
|
1307 | 1307 | # TODO: Think about moving this into a pytest-pyro package and make it a |
|
1308 | 1308 | # pytest plugin |
|
1309 | 1309 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
1310 | 1310 | def pytest_runtest_makereport(item, call): |
|
1311 | 1311 | """ |
|
1312 | 1312 | Adds the remote traceback if the exception has this information. |
|
1313 | 1313 | |
|
1314 | 1314 | Pyro4 attaches this information as the attribute `_pyroTraceback` |
|
1315 | 1315 | to the exception instance. |
|
1316 | 1316 | """ |
|
1317 | 1317 | outcome = yield |
|
1318 | 1318 | report = outcome.get_result() |
|
1319 | 1319 | if call.excinfo: |
|
1320 | 1320 | _add_pyro_remote_traceback(report, call.excinfo.value) |
|
1321 | 1321 | |
|
1322 | 1322 | |
|
1323 | 1323 | def _add_pyro_remote_traceback(report, exc): |
|
1324 | 1324 | pyro_traceback = getattr(exc, '_pyroTraceback', None) |
|
1325 | 1325 | |
|
1326 | 1326 | if pyro_traceback: |
|
1327 | 1327 | traceback = ''.join(pyro_traceback) |
|
1328 | 1328 | section = 'Pyro4 remote traceback ' + report.when |
|
1329 | 1329 | report.sections.append((section, traceback)) |
|
1330 | 1330 | |
|
1331 | 1331 | |
|
1332 | 1332 | @pytest.fixture(scope='session') |
|
1333 | 1333 | def testrun(): |
|
1334 | 1334 | return { |
|
1335 | 1335 | 'uuid': uuid.uuid4(), |
|
1336 | 1336 | 'start': datetime.datetime.utcnow().isoformat(), |
|
1337 | 1337 | 'timestamp': int(time.time()), |
|
1338 | 1338 | } |
|
1339 | 1339 | |
|
1340 | 1340 | |
|
1341 | 1341 | @pytest.fixture(autouse=True) |
|
1342 | 1342 | def collect_appenlight_stats(request, testrun): |
|
1343 | 1343 | """ |
|
1344 | 1344 | This fixture reports memory consumption of single tests. |
|
1345 | 1345 | |
|
1346 | 1346 | It gathers data based on `psutil` and sends them to Appenlight. The option |
|
1347 | 1347 | ``--ae`` has to be used to enable this fixture and the API key for your |
|
1348 | 1348 | application has to be provided in ``--ae-key``. |
|
1349 | 1349 | """ |
|
1350 | 1350 | try: |
|
1351 | 1351 | # cygwin does not have psutil support yet. |
|
1352 | 1352 | import psutil |
|
1353 | 1353 | except ImportError: |
|
1354 | 1354 | return |
|
1355 | 1355 | |
|
1356 | 1356 | if not request.config.getoption('--appenlight'): |
|
1357 | 1357 | return |
|
1358 | 1358 | else: |
|
1359 | 1359 | # Only request the pylonsapp fixture if appenlight tracking is |
|
1360 | 1360 | # enabled. This will speed up a test run of unit tests by 2 to 3 |
|
1361 | 1361 | # seconds if appenlight is not enabled. |
|
1362 | 1362 | pylonsapp = request.getfuncargvalue("pylonsapp") |
|
1363 | 1363 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) |
|
1364 | 1364 | client = AppenlightClient( |
|
1365 | 1365 | url=url, |
|
1366 | 1366 | api_key=request.config.getoption('--appenlight-api-key'), |
|
1367 | 1367 | namespace=request.node.nodeid, |
|
1368 | 1368 | request=str(testrun['uuid']), |
|
1369 | 1369 | testrun=testrun) |
|
1370 | 1370 | |
|
1371 | 1371 | client.collect({ |
|
1372 | 1372 | 'message': "Starting", |
|
1373 | 1373 | }) |
|
1374 | 1374 | |
|
1375 | 1375 | server_and_port = pylonsapp.config['vcs.server'] |
|
1376 | 1376 | server = create_vcsserver_proxy(server_and_port) |
|
1377 | 1377 | with server: |
|
1378 | 1378 | vcs_pid = server.get_pid() |
|
1379 | 1379 | server.run_gc() |
|
1380 | 1380 | vcs_process = psutil.Process(vcs_pid) |
|
1381 | 1381 | mem = vcs_process.memory_info() |
|
1382 | 1382 | client.tag_before('vcsserver.rss', mem.rss) |
|
1383 | 1383 | client.tag_before('vcsserver.vms', mem.vms) |
|
1384 | 1384 | |
|
1385 | 1385 | test_process = psutil.Process() |
|
1386 | 1386 | mem = test_process.memory_info() |
|
1387 | 1387 | client.tag_before('test.rss', mem.rss) |
|
1388 | 1388 | client.tag_before('test.vms', mem.vms) |
|
1389 | 1389 | |
|
1390 | 1390 | client.tag_before('time', time.time()) |
|
1391 | 1391 | |
|
1392 | 1392 | @request.addfinalizer |
|
1393 | 1393 | def send_stats(): |
|
1394 | 1394 | client.tag_after('time', time.time()) |
|
1395 | 1395 | with server: |
|
1396 | 1396 | gc_stats = server.run_gc() |
|
1397 | 1397 | for tag, value in gc_stats.items(): |
|
1398 | 1398 | client.tag_after(tag, value) |
|
1399 | 1399 | mem = vcs_process.memory_info() |
|
1400 | 1400 | client.tag_after('vcsserver.rss', mem.rss) |
|
1401 | 1401 | client.tag_after('vcsserver.vms', mem.vms) |
|
1402 | 1402 | |
|
1403 | 1403 | mem = test_process.memory_info() |
|
1404 | 1404 | client.tag_after('test.rss', mem.rss) |
|
1405 | 1405 | client.tag_after('test.vms', mem.vms) |
|
1406 | 1406 | |
|
1407 | 1407 | client.collect({ |
|
1408 | 1408 | 'message': "Finished", |
|
1409 | 1409 | }) |
|
1410 | 1410 | client.send_stats() |
|
1411 | 1411 | |
|
1412 | 1412 | return client |
|
1413 | 1413 | |
|
1414 | 1414 | |
|
1415 | 1415 | class AppenlightClient(): |
|
1416 | 1416 | |
|
1417 | 1417 | url_template = '{url}?protocol_version=0.5' |
|
1418 | 1418 | |
|
1419 | 1419 | def __init__( |
|
1420 | 1420 | self, url, api_key, add_server=True, add_timestamp=True, |
|
1421 | 1421 | namespace=None, request=None, testrun=None): |
|
1422 | 1422 | self.url = self.url_template.format(url=url) |
|
1423 | 1423 | self.api_key = api_key |
|
1424 | 1424 | self.add_server = add_server |
|
1425 | 1425 | self.add_timestamp = add_timestamp |
|
1426 | 1426 | self.namespace = namespace |
|
1427 | 1427 | self.request = request |
|
1428 | 1428 | self.server = socket.getfqdn(socket.gethostname()) |
|
1429 | 1429 | self.tags_before = {} |
|
1430 | 1430 | self.tags_after = {} |
|
1431 | 1431 | self.stats = [] |
|
1432 | 1432 | self.testrun = testrun or {} |
|
1433 | 1433 | |
|
1434 | 1434 | def tag_before(self, tag, value): |
|
1435 | 1435 | self.tags_before[tag] = value |
|
1436 | 1436 | |
|
1437 | 1437 | def tag_after(self, tag, value): |
|
1438 | 1438 | self.tags_after[tag] = value |
|
1439 | 1439 | |
|
1440 | 1440 | def collect(self, data): |
|
1441 | 1441 | if self.add_server: |
|
1442 | 1442 | data.setdefault('server', self.server) |
|
1443 | 1443 | if self.add_timestamp: |
|
1444 | 1444 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) |
|
1445 | 1445 | if self.namespace: |
|
1446 | 1446 | data.setdefault('namespace', self.namespace) |
|
1447 | 1447 | if self.request: |
|
1448 | 1448 | data.setdefault('request', self.request) |
|
1449 | 1449 | self.stats.append(data) |
|
1450 | 1450 | |
|
1451 | 1451 | def send_stats(self): |
|
1452 | 1452 | tags = [ |
|
1453 | 1453 | ('testrun', self.request), |
|
1454 | 1454 | ('testrun.start', self.testrun['start']), |
|
1455 | 1455 | ('testrun.timestamp', self.testrun['timestamp']), |
|
1456 | 1456 | ('test', self.namespace), |
|
1457 | 1457 | ] |
|
1458 | 1458 | for key, value in self.tags_before.items(): |
|
1459 | 1459 | tags.append((key + '.before', value)) |
|
1460 | 1460 | try: |
|
1461 | 1461 | delta = self.tags_after[key] - value |
|
1462 | 1462 | tags.append((key + '.delta', delta)) |
|
1463 | 1463 | except Exception: |
|
1464 | 1464 | pass |
|
1465 | 1465 | for key, value in self.tags_after.items(): |
|
1466 | 1466 | tags.append((key + '.after', value)) |
|
1467 | 1467 | self.collect({ |
|
1468 | 1468 | 'message': "Collected tags", |
|
1469 | 1469 | 'tags': tags, |
|
1470 | 1470 | }) |
|
1471 | 1471 | |
|
1472 | 1472 | response = requests.post( |
|
1473 | 1473 | self.url, |
|
1474 | 1474 | headers={ |
|
1475 | 1475 | 'X-appenlight-api-key': self.api_key}, |
|
1476 | 1476 | json=self.stats, |
|
1477 | 1477 | ) |
|
1478 | 1478 | |
|
1479 | 1479 | if not response.status_code == 200: |
|
1480 | 1480 | pprint.pprint(self.stats) |
|
1481 | 1481 | print response.headers |
|
1482 | 1482 | print response.text |
|
1483 | 1483 | raise Exception('Sending to appenlight failed') |
|
1484 | 1484 | |
|
1485 | 1485 | |
|
1486 | 1486 | @pytest.fixture |
|
1487 | 1487 | def gist_util(request, pylonsapp): |
|
1488 | 1488 | """ |
|
1489 | 1489 | Provides a wired instance of `GistUtility` with integrated cleanup. |
|
1490 | 1490 | """ |
|
1491 | 1491 | utility = GistUtility() |
|
1492 | 1492 | request.addfinalizer(utility.cleanup) |
|
1493 | 1493 | return utility |
|
1494 | 1494 | |
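A minimal usage sketch (editorial addition); the assertion assumes the `gist_access_id` attribute of the Gist model and the gist is destroyed again by `GistUtility.cleanup()`.

def test_created_gist_is_accessible(gist_util):
    gist = gist_util.create_gist()
    assert gist.gist_access_id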
|
1495 | 1495 | |
|
1496 | 1496 | class GistUtility(object): |
|
1497 | 1497 | def __init__(self): |
|
1498 | 1498 | self.fixture = Fixture() |
|
1499 | 1499 | self.gist_ids = [] |
|
1500 | 1500 | |
|
1501 | 1501 | def create_gist(self, **kwargs): |
|
1502 | 1502 | gist = self.fixture.create_gist(**kwargs) |
|
1503 | 1503 | self.gist_ids.append(gist.gist_id) |
|
1504 | 1504 | return gist |
|
1505 | 1505 | |
|
1506 | 1506 | def cleanup(self): |
|
1507 | 1507 | for id_ in self.gist_ids: |
|
1508 | 1508 | self.fixture.destroy_gists(str(id_)) |
|
1509 | 1509 | |
|
1510 | 1510 | |
|
1511 | 1511 | @pytest.fixture |
|
1512 | 1512 | def enabled_backends(request): |
|
1513 | 1513 | backends = request.config.option.backends |
|
1514 | 1514 | return backends[:] |
|
1515 | 1515 | |
|
1516 | 1516 | |
|
1517 | 1517 | @pytest.fixture |
|
1518 | 1518 | def settings_util(request): |
|
1519 | 1519 | """ |
|
1520 | 1520 | Provides a wired instance of `SettingsUtility` with integrated cleanup. |
|
1521 | 1521 | """ |
|
1522 | 1522 | utility = SettingsUtility() |
|
1523 | 1523 | request.addfinalizer(utility.cleanup) |
|
1524 | 1524 | return utility |
|
1525 | 1525 | |
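An illustrative sketch (not part of the original module); the section/value pair is a made-up ui entry and is deleted again by the utility's cleanup.

def test_temporary_ui_setting(settings_util):
    setting = settings_util.create_rhodecode_ui(
        'hooks', 'python:example.module.hook', active=False)
    assert setting.ui_section == 'hooks'
    assert setting.ui_active is False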
|
1526 | 1526 | |
|
1527 | 1527 | class SettingsUtility(object): |
|
1528 | 1528 | def __init__(self): |
|
1529 | 1529 | self.rhodecode_ui_ids = [] |
|
1530 | 1530 | self.rhodecode_setting_ids = [] |
|
1531 | 1531 | self.repo_rhodecode_ui_ids = [] |
|
1532 | 1532 | self.repo_rhodecode_setting_ids = [] |
|
1533 | 1533 | |
|
1534 | 1534 | def create_repo_rhodecode_ui( |
|
1535 | 1535 | self, repo, section, value, key=None, active=True, cleanup=True): |
|
1536 | 1536 | key = key or hashlib.sha1( |
|
1537 | 1537 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() |
|
1538 | 1538 | |
|
1539 | 1539 | setting = RepoRhodeCodeUi() |
|
1540 | 1540 | setting.repository_id = repo.repo_id |
|
1541 | 1541 | setting.ui_section = section |
|
1542 | 1542 | setting.ui_value = value |
|
1543 | 1543 | setting.ui_key = key |
|
1544 | 1544 | setting.ui_active = active |
|
1545 | 1545 | Session().add(setting) |
|
1546 | 1546 | Session().commit() |
|
1547 | 1547 | |
|
1548 | 1548 | if cleanup: |
|
1549 | 1549 | self.repo_rhodecode_ui_ids.append(setting.ui_id) |
|
1550 | 1550 | return setting |
|
1551 | 1551 | |
|
1552 | 1552 | def create_rhodecode_ui( |
|
1553 | 1553 | self, section, value, key=None, active=True, cleanup=True): |
|
1554 | 1554 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() |
|
1555 | 1555 | |
|
1556 | 1556 | setting = RhodeCodeUi() |
|
1557 | 1557 | setting.ui_section = section |
|
1558 | 1558 | setting.ui_value = value |
|
1559 | 1559 | setting.ui_key = key |
|
1560 | 1560 | setting.ui_active = active |
|
1561 | 1561 | Session().add(setting) |
|
1562 | 1562 | Session().commit() |
|
1563 | 1563 | |
|
1564 | 1564 | if cleanup: |
|
1565 | 1565 | self.rhodecode_ui_ids.append(setting.ui_id) |
|
1566 | 1566 | return setting |
|
1567 | 1567 | |
|
1568 | 1568 | def create_repo_rhodecode_setting( |
|
1569 | 1569 | self, repo, name, value, type_, cleanup=True): |
|
1570 | 1570 | setting = RepoRhodeCodeSetting( |
|
1571 | 1571 | repo.repo_id, key=name, val=value, type=type_) |
|
1572 | 1572 | Session().add(setting) |
|
1573 | 1573 | Session().commit() |
|
1574 | 1574 | |
|
1575 | 1575 | if cleanup: |
|
1576 | 1576 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) |
|
1577 | 1577 | return setting |
|
1578 | 1578 | |
|
1579 | 1579 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): |
|
1580 | 1580 | setting = RhodeCodeSetting(key=name, val=value, type=type_) |
|
1581 | 1581 | Session().add(setting) |
|
1582 | 1582 | Session().commit() |
|
1583 | 1583 | |
|
1584 | 1584 | if cleanup: |
|
1585 | 1585 | self.rhodecode_setting_ids.append(setting.app_settings_id) |
|
1586 | 1586 | |
|
1587 | 1587 | return setting |
|
1588 | 1588 | |
|
1589 | 1589 | def cleanup(self): |
|
1590 | 1590 | for id_ in self.rhodecode_ui_ids: |
|
1591 | 1591 | setting = RhodeCodeUi.get(id_) |
|
1592 | 1592 | Session().delete(setting) |
|
1593 | 1593 | |
|
1594 | 1594 | for id_ in self.rhodecode_setting_ids: |
|
1595 | 1595 | setting = RhodeCodeSetting.get(id_) |
|
1596 | 1596 | Session().delete(setting) |
|
1597 | 1597 | |
|
1598 | 1598 | for id_ in self.repo_rhodecode_ui_ids: |
|
1599 | 1599 | setting = RepoRhodeCodeUi.get(id_) |
|
1600 | 1600 | Session().delete(setting) |
|
1601 | 1601 | |
|
1602 | 1602 | for id_ in self.repo_rhodecode_setting_ids: |
|
1603 | 1603 | setting = RepoRhodeCodeSetting.get(id_) |
|
1604 | 1604 | Session().delete(setting) |
|
1605 | 1605 | |
|
1606 | 1606 | Session().commit() |
|
1607 | 1607 | |
|
1608 | 1608 | |
|
1609 | 1609 | @pytest.fixture |
|
1610 | 1610 | def no_notifications(request): |
|
1611 | 1611 | notification_patcher = mock.patch( |
|
1612 | 1612 | 'rhodecode.model.notification.NotificationModel.create') |
|
1613 | 1613 | notification_patcher.start() |
|
1614 | 1614 | request.addfinalizer(notification_patcher.stop) |
|
1615 | 1615 | |
|
1616 | 1616 | |
|
1617 | 1617 | @pytest.fixture |
|
1618 | 1618 | def silence_action_logger(request): |
|
1619 | 1619 | notification_patcher = mock.patch( |
|
1620 | 1620 | 'rhodecode.lib.utils.action_logger') |
|
1621 | 1621 | notification_patcher.start() |
|
1622 | 1622 | request.addfinalizer(notification_patcher.stop) |
|
1623 | 1623 | |
|
1624 | 1624 | |
|
1625 | 1625 | @pytest.fixture(scope='session') |
|
1626 | 1626 | def repeat(request): |
|
1627 | 1627 | """ |
|
1628 | 1628 | The number of repetitions is based on this fixture. |
|
1629 | 1629 | |
|
1630 | 1630 | Slower calls may divide it by 10 or 100. It is chosen in a way so that the |
|
1631 | 1631 | tests are not too slow in our default test suite. |
|
1632 | 1632 | """ |
|
1633 | 1633 | return request.config.getoption('--repeat') |
|
1634 | 1634 | |
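A sketch of the intended pattern described in the docstring above (editorial addition); `expensive_operation` is a placeholder for whatever the test exercises.

def test_expensive_call_speed(repeat):
    # a slower call divides the configured repetition count, as suggested
    # by the fixture docstring
    for _ in xrange(repeat / 100):
        expensive_operation()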
|
1635 | 1635 | |
|
1636 | 1636 | @pytest.fixture |
|
1637 | 1637 | def rhodecode_fixtures(): |
|
1638 | 1638 | return Fixture() |
|
1639 | 1639 | |
|
1640 | 1640 | |
|
1641 | 1641 | @pytest.fixture |
|
1642 | 1642 | def request_stub(): |
|
1643 | 1643 | """ |
|
1644 | 1644 | Stub request object. |
|
1645 | 1645 | """ |
|
1646 | 1646 | request = pyramid.testing.DummyRequest() |
|
1647 | 1647 | request.scheme = 'https' |
|
1648 | 1648 | return request |
|
1649 | 1649 | |
|
1650 | 1650 | |
|
1651 | 1651 | @pytest.fixture |
|
1652 | 1652 | def config_stub(request, request_stub): |
|
1653 | 1653 | """ |
|
1654 | 1654 | Set up pyramid.testing and return the Configurator. |
|
1655 | 1655 | """ |
|
1656 | 1656 | config = pyramid.testing.setUp(request=request_stub) |
|
1657 | 1657 | |
|
1658 | 1658 | @request.addfinalizer |
|
1659 | 1659 | def cleanup(): |
|
1660 | 1660 | pyramid.testing.tearDown() |
|
1661 | 1661 | |
|
1662 | 1662 | return config |
|
1663 | 1663 | |
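An illustrative sketch (not part of the diff); `testing_securitypolicy` is a standard helper on the pyramid testing Configurator, and the user id is made up.

def test_with_stubbed_pyramid(config_stub, request_stub):
    config_stub.testing_securitypolicy(userid='stub-user', permissive=True)
    # code under test can now resolve the registry set up by config_stub
    assert request_stub.scheme == 'https'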
|
1664 | 1664 | |
|
1665 | 1665 | @pytest.fixture |
|
1666 | 1666 | def StubIntegrationType(): |
|
1667 | 1667 | class _StubIntegrationType(IntegrationTypeBase): |
|
1668 | 1668 | """ Test integration type class """ |
|
1669 | 1669 | |
|
1670 | 1670 | key = 'test' |
|
1671 | 1671 | display_name = 'Test integration type' |
|
1672 | 1672 | description = 'A test integration type for testing' |
|
1673 | 1673 | icon = 'test_icon_html_image' |
|
1674 | 1674 | |
|
1675 | 1675 | def __init__(self, settings): |
|
1676 | 1676 | super(_StubIntegrationType, self).__init__(settings) |
|
1677 | 1677 | self.sent_events = [] # for testing |
|
1678 | 1678 | |
|
1679 | 1679 | def send_event(self, event): |
|
1680 | 1680 | self.sent_events.append(event) |
|
1681 | 1681 | |
|
1682 | 1682 | def settings_schema(self): |
|
1683 | 1683 | class SettingsSchema(colander.Schema): |
|
1684 | 1684 | test_string_field = colander.SchemaNode( |
|
1685 | 1685 | colander.String(), |
|
1686 | 1686 | missing=colander.required, |
|
1687 | 1687 | title='test string field', |
|
1688 | 1688 | ) |
|
1689 | 1689 | test_int_field = colander.SchemaNode( |
|
1690 | 1690 | colander.Int(), |
|
1691 | 1691 | title='some integer setting', |
|
1692 | 1692 | ) |
|
1693 | 1693 | return SettingsSchema() |
|
1694 | 1694 | |
|
1695 | 1695 | |
|
1696 | 1696 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1697 | 1697 | return _StubIntegrationType |
|
1698 | 1698 | |
|
1699 | 1699 | @pytest.fixture |
|
1700 | 1700 | def stub_integration_settings(): |
|
1701 | 1701 | return { |
|
1702 | 1702 | 'test_string_field': 'some data', |
|
1703 | 1703 | 'test_int_field': 100, |
|
1704 | 1704 | } |
|
1705 | 1705 | |
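A hedged sketch (editorial addition) showing that the stub settings above are meant to pass the colander schema declared by the stub integration type; the test name is illustrative.

def test_stub_settings_validate(StubIntegrationType, stub_integration_settings):
    schema = StubIntegrationType(settings={}).settings_schema()
    deserialized = schema.deserialize(stub_integration_settings)
    assert deserialized['test_int_field'] == 100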
|
1706 | 1706 | |
|
1707 | 1707 | @pytest.fixture |
|
1708 | 1708 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1709 | 1709 | stub_integration_settings): |
|
1710 | 1710 | integration = IntegrationModel().create( |
|
1711 | 1711 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1712 | name='test repo integration', |

1712 | name='test repo integration', | |
|
1713 | repo=repo_stub, repo_group=None, child_repos_only=None) | |
|
1713 | 1714 | |
|
1714 | 1715 | @request.addfinalizer |
|
1715 | 1716 | def cleanup(): |
|
1716 | 1717 | IntegrationModel().delete(integration) |
|
1717 | 1718 | |
|
1718 | 1719 | return integration |
|
1719 | 1720 | |
|
1720 | 1721 | |
|
1721 | 1722 | @pytest.fixture |
|
1722 | 1723 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, |
|
1723 | 1724 | stub_integration_settings): |
|
1724 | 1725 | integration = IntegrationModel().create( |
|
1725 | 1726 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1726 | name='test repogroup integration', |

1727 | name='test repogroup integration', | |
|
1728 | repo=None, repo_group=test_repo_group, child_repos_only=True) | |
|
1729 | ||
|
1730 | @request.addfinalizer | |
|
1731 | def cleanup(): | |
|
1732 | IntegrationModel().delete(integration) | |
|
1733 | ||
|
1734 | return integration | |
|
1735 | ||
|
1736 | ||
|
1737 | @pytest.fixture | |
|
1738 | def repogroup_recursive_integration_stub(request, test_repo_group, | |
|
1739 | StubIntegrationType, stub_integration_settings): | |
|
1740 | integration = IntegrationModel().create( | |
|
1741 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
|
1742 | name='test recursive repogroup integration', | |
|
1743 | repo=None, repo_group=test_repo_group, child_repos_only=False) | |
|
1727 | 1744 | |
|
1728 | 1745 | @request.addfinalizer |
|
1729 | 1746 | def cleanup(): |
|
1730 | 1747 | IntegrationModel().delete(integration) |
|
1731 | 1748 | |
|
1732 | 1749 | return integration |
|
1733 | 1750 | |
|
1734 | 1751 | |
|
1735 | 1752 | @pytest.fixture |
|
1736 | 1753 | def global_integration_stub(request, StubIntegrationType, |
|
1737 | 1754 | stub_integration_settings): |
|
1738 | 1755 | integration = IntegrationModel().create( |
|
1739 | 1756 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1740 | name='test global integration', |

1757 | name='test global integration', | |
|
1758 | repo=None, repo_group=None, child_repos_only=None) | |
|
1741 | 1759 | |
|
1742 | 1760 | @request.addfinalizer |
|
1743 | 1761 | def cleanup(): |
|
1744 | 1762 | IntegrationModel().delete(integration) |
|
1745 | 1763 | |
|
1746 | 1764 | return integration |
|
1747 | 1765 | |
|
1748 | 1766 | |
|
1749 | 1767 | @pytest.fixture |
|
1750 | 1768 | def root_repos_integration_stub(request, StubIntegrationType, |
|
1751 | 1769 | stub_integration_settings): |
|
1752 | 1770 | integration = IntegrationModel().create( |
|
1753 | 1771 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1754 | name='test global integration', |

1772 | name='test global integration', | |
|
1773 | repo=None, repo_group=None, child_repos_only=True) | |
|
1755 | 1774 | |
|
1756 | 1775 | @request.addfinalizer |
|
1757 | 1776 | def cleanup(): |
|
1758 | 1777 | IntegrationModel().delete(integration) |
|
1759 | 1778 | |
|
1760 | 1779 | return integration |