##// END OF EJS Templates
integrations: add recursive repo group scope to allow integrations...
dan -
r793:fc8d2069 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,385 +1,392 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pylons
22 22 import deform
23 23 import logging
24 24 import colander
25 25 import peppercorn
26 26 import webhelpers.paginate
27 27
28 28 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
29 29 from pyramid.renderers import render
30 30 from pyramid.response import Response
31 31
32 32 from rhodecode.lib import auth
33 33 from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.helpers import Page
36 36 from rhodecode.model.db import Repository, RepoGroup, Session, Integration
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.integration import IntegrationModel
39 39 from rhodecode.admin.navigation import navigation_list
40 40 from rhodecode.translation import _
41 41 from rhodecode.integrations import integration_type_registry
42 42 from rhodecode.model.validation_schema.schemas.integration_schema import (
43 make_integration_schema)
43 make_integration_schema, IntegrationScopeType)
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class IntegrationSettingsViewBase(object):
49 49 """ Base Integration settings view used by both repo / global settings """
50 50
51 51 def __init__(self, context, request):
52 52 self.context = context
53 53 self.request = request
54 54 self._load_general_context()
55 55
56 56 if not self.perm_check(request.user):
57 57 raise HTTPForbidden()
58 58
59 59 def _load_general_context(self):
60 60 """
61 61 This avoids boilerplate for repo/global+list/edit+views/templates
62 62 by doing all possible contexts at the same time however it should
63 63 be split up into separate functions once more "contexts" exist
64 64 """
65 65
66 66 self.IntegrationType = None
67 67 self.repo = None
68 68 self.repo_group = None
69 69 self.integration = None
70 70 self.integrations = {}
71 71
72 72 request = self.request
73 73
74 74 if 'repo_name' in request.matchdict: # in repo settings context
75 75 repo_name = request.matchdict['repo_name']
76 76 self.repo = Repository.get_by_repo_name(repo_name)
77 77
78 78 if 'repo_group_name' in request.matchdict: # in group settings context
79 79 repo_group_name = request.matchdict['repo_group_name']
80 80 self.repo_group = RepoGroup.get_by_group_name(repo_group_name)
81 81
82 82
83 83 if 'integration' in request.matchdict: # integration type context
84 84 integration_type = request.matchdict['integration']
85 85 self.IntegrationType = integration_type_registry[integration_type]
86 86
87 87 if 'integration_id' in request.matchdict: # single integration context
88 88 integration_id = request.matchdict['integration_id']
89 89 self.integration = Integration.get(integration_id)
90 90
91 91 # extra perms check just in case
92 92 if not self._has_perms_for_integration(self.integration):
93 93 raise HTTPForbidden()
94 94
95 95 self.settings = self.integration and self.integration.settings or {}
96 96 self.admin_view = not (self.repo or self.repo_group)
97 97
98 98 def _has_perms_for_integration(self, integration):
99 99 perms = self.request.user.permissions
100 100
101 101 if 'hg.admin' in perms['global']:
102 102 return True
103 103
104 104 if integration.repo:
105 105 return perms['repositories'].get(
106 106 integration.repo.repo_name) == 'repository.admin'
107 107
108 108 if integration.repo_group:
109 109 return perms['repositories_groups'].get(
110 110 integration.repo_group.group_name) == 'group.admin'
111 111
112 112 return False
113 113
114 114 def _template_c_context(self):
115 115 # TODO: dan: this is a stopgap in order to inherit from current pylons
116 116 # based admin/repo settings templates - this should be removed entirely
117 117 # after port to pyramid
118 118
119 119 c = pylons.tmpl_context
120 120 c.active = 'integrations'
121 121 c.rhodecode_user = self.request.user
122 122 c.repo = self.repo
123 123 c.repo_group = self.repo_group
124 124 c.repo_name = self.repo and self.repo.repo_name or None
125 125 c.repo_group_name = self.repo_group and self.repo_group.group_name or None
126 126
127 127 if self.repo:
128 128 c.repo_info = self.repo
129 129 c.rhodecode_db_repo = self.repo
130 130 c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)
131 131 else:
132 132 c.navlist = navigation_list(self.request)
133 133
134 134 return c
135 135
136 136 def _form_schema(self):
137 137 schema = make_integration_schema(IntegrationType=self.IntegrationType,
138 138 settings=self.settings)
139 139
140 140 # returns a clone, important if mutating the schema later
141 141 return schema.bind(
142 142 permissions=self.request.user.permissions,
143 143 no_scope=not self.admin_view)
144 144
145 145
146 146 def _form_defaults(self):
147 147 defaults = {}
148 148
149 149 if self.integration:
150 150 defaults['settings'] = self.integration.settings or {}
151 151 defaults['options'] = {
152 152 'name': self.integration.name,
153 153 'enabled': self.integration.enabled,
154 'scope': self.integration.scope,
154 'scope': {
155 'repo': self.integration.repo,
156 'repo_group': self.integration.repo_group,
157 'child_repos_only': self.integration.child_repos_only,
158 },
155 159 }
156 160 else:
157 161 if self.repo:
158 162 scope = _('{repo_name} repository').format(
159 163 repo_name=self.repo.repo_name)
160 164 elif self.repo_group:
161 165 scope = _('{repo_group_name} repo group').format(
162 166 repo_group_name=self.repo_group.group_name)
163 167 else:
164 168 scope = _('Global')
165 169
166 170 defaults['options'] = {
167 171 'enabled': True,
168 172 'name': _('{name} integration').format(
169 173 name=self.IntegrationType.display_name),
170 174 }
171 if self.repo:
172 defaults['options']['scope'] = self.repo
173 elif self.repo_group:
174 defaults['options']['scope'] = self.repo_group
175 defaults['options']['scope'] = {
176 'repo': self.repo,
177 'repo_group': self.repo_group,
178 }
175 179
176 180 return defaults
177 181
178 182 def _delete_integration(self, integration):
179 183 Session().delete(self.integration)
180 184 Session().commit()
181 185 self.request.session.flash(
182 186 _('Integration {integration_name} deleted successfully.').format(
183 187 integration_name=self.integration.name),
184 188 queue='success')
185 189
186 190 if self.repo:
187 191 redirect_to = self.request.route_url(
188 192 'repo_integrations_home', repo_name=self.repo.repo_name)
189 193 elif self.repo_group:
190 194 redirect_to = self.request.route_url(
191 195 'repo_group_integrations_home',
192 196 repo_group_name=self.repo_group.group_name)
193 197 else:
194 198 redirect_to = self.request.route_url('global_integrations_home')
195 199 raise HTTPFound(redirect_to)
196 200
197 201 def settings_get(self, defaults=None, form=None):
198 202 """
199 203 View that displays the integration settings as a form.
200 204 """
201 205
202 206 defaults = defaults or self._form_defaults()
203 207 schema = self._form_schema()
204 208
205 209 if self.integration:
206 210 buttons = ('submit', 'delete')
207 211 else:
208 212 buttons = ('submit',)
209 213
210 214 form = form or deform.Form(schema, appstruct=defaults, buttons=buttons)
211 215
212 216 template_context = {
213 217 'form': form,
214 218 'current_IntegrationType': self.IntegrationType,
215 219 'integration': self.integration,
216 220 'c': self._template_c_context(),
217 221 }
218 222
219 223 return template_context
220 224
221 225 @auth.CSRFRequired()
222 226 def settings_post(self):
223 227 """
224 228 View that validates and stores the integration settings.
225 229 """
226 230 controls = self.request.POST.items()
227 231 pstruct = peppercorn.parse(controls)
228 232
229 233 if self.integration and pstruct.get('delete'):
230 234 return self._delete_integration(self.integration)
231 235
232 236 schema = self._form_schema()
233 237
234 238 skip_settings_validation = False
235 239 if self.integration and 'enabled' not in pstruct.get('options', {}):
236 240 skip_settings_validation = True
237 241 schema['settings'].validator = None
238 242 for field in schema['settings'].children:
239 243 field.validator = None
240 244 field.missing = ''
241 245
242 246 if self.integration:
243 247 buttons = ('submit', 'delete')
244 248 else:
245 249 buttons = ('submit',)
246 250
247 251 form = deform.Form(schema, buttons=buttons)
248 252
249 253 if not self.admin_view:
250 254 # scope is read only field in these cases, and has to be added
251 255 options = pstruct.setdefault('options', {})
252 256 if 'scope' not in options:
253 if self.repo:
254 options['scope'] = 'repo:{}'.format(self.repo.repo_name)
255 elif self.repo_group:
256 options['scope'] = 'repogroup:{}'.format(
257 self.repo_group.group_name)
257 options['scope'] = IntegrationScopeType().serialize(None, {
258 'repo': self.repo,
259 'repo_group': self.repo_group,
260 })
258 261
259 262 try:
260 263 valid_data = form.validate_pstruct(pstruct)
261 264 except deform.ValidationFailure as e:
262 265 self.request.session.flash(
263 266 _('Errors exist when saving integration settings. '
264 267 'Please check the form inputs.'),
265 268 queue='error')
266 269 return self.settings_get(form=e)
267 270
268 271 if not self.integration:
269 272 self.integration = Integration()
270 273 self.integration.integration_type = self.IntegrationType.key
271 274 Session().add(self.integration)
272 275
273 276 scope = valid_data['options']['scope']
274 277
275 278 IntegrationModel().update_integration(self.integration,
276 279 name=valid_data['options']['name'],
277 280 enabled=valid_data['options']['enabled'],
278 281 settings=valid_data['settings'],
279 scope=scope)
282 repo=scope['repo'],
283 repo_group=scope['repo_group'],
284 child_repos_only=scope['child_repos_only'],
285 )
286
280 287
281 288 self.integration.settings = valid_data['settings']
282 289 Session().commit()
283 290 # Display success message and redirect.
284 291 self.request.session.flash(
285 292 _('Integration {integration_name} updated successfully.').format(
286 293 integration_name=self.IntegrationType.display_name),
287 294 queue='success')
288 295
289 296
290 297 # if integration scope changes, we must redirect to the right place
291 298 # keeping in mind if the original view was for /repo/ or /_admin/
292 299 admin_view = not (self.repo or self.repo_group)
293 300
294 if isinstance(self.integration.scope, Repository) and not admin_view:
301 if self.integration.repo and not admin_view:
295 302 redirect_to = self.request.route_path(
296 303 'repo_integrations_edit',
297 repo_name=self.integration.scope.repo_name,
304 repo_name=self.integration.repo.repo_name,
298 305 integration=self.integration.integration_type,
299 306 integration_id=self.integration.integration_id)
300 elif isinstance(self.integration.scope, RepoGroup) and not admin_view:
307 elif self.integration.repo_group and not admin_view:
301 308 redirect_to = self.request.route_path(
302 309 'repo_group_integrations_edit',
303 repo_group_name=self.integration.scope.group_name,
310 repo_group_name=self.integration.repo_group.group_name,
304 311 integration=self.integration.integration_type,
305 312 integration_id=self.integration.integration_id)
306 313 else:
307 314 redirect_to = self.request.route_path(
308 315 'global_integrations_edit',
309 316 integration=self.integration.integration_type,
310 317 integration_id=self.integration.integration_id)
311 318
312 319 return HTTPFound(redirect_to)
313 320
314 321 def index(self):
315 322 """ List integrations """
316 323 if self.repo:
317 324 scope = self.repo
318 325 elif self.repo_group:
319 326 scope = self.repo_group
320 327 else:
321 328 scope = 'all'
322 329
323 330 integrations = []
324 331
325 332 for integration in IntegrationModel().get_integrations(
326 333 scope=scope, IntegrationType=self.IntegrationType):
327 334
328 335 # extra permissions check *just in case*
329 336 if not self._has_perms_for_integration(integration):
330 337 continue
331 338 integrations.append(integration)
332 339
333 340 sort_arg = self.request.GET.get('sort', 'name:asc')
334 341 if ':' in sort_arg:
335 342 sort_field, sort_dir = sort_arg.split(':')
336 343 else:
337 344 sort_field = sort_arg, 'asc'
338 345
339 346 assert sort_field in ('name', 'integration_type', 'enabled', 'scope')
340 347
341 348 integrations.sort(
342 349 key=lambda x: getattr(x[1], sort_field), reverse=(sort_dir=='desc'))
343 350
344 351
345 352 page_url = webhelpers.paginate.PageURL(
346 353 self.request.path, self.request.GET)
347 354 page = safe_int(self.request.GET.get('page', 1), 1)
348 355
349 356 integrations = Page(integrations, page=page, items_per_page=10,
350 357 url=page_url)
351 358
352 359 template_context = {
353 360 'sort_field': sort_field,
354 361 'rev_sort_dir': sort_dir != 'desc' and 'desc' or 'asc',
355 362 'current_IntegrationType': self.IntegrationType,
356 363 'integrations_list': integrations,
357 364 'available_integrations': integration_type_registry,
358 365 'c': self._template_c_context(),
359 366 'request': self.request,
360 367 }
361 368 return template_context
362 369
363 370 def new_integration(self):
364 371 template_context = {
365 372 'available_integrations': integration_type_registry,
366 373 'c': self._template_c_context(),
367 374 }
368 375 return template_context
369 376
370 377 class GlobalIntegrationsView(IntegrationSettingsViewBase):
371 378 def perm_check(self, user):
372 379 return auth.HasPermissionAll('hg.admin').check_permissions(user=user)
373 380
374 381
375 382 class RepoIntegrationsView(IntegrationSettingsViewBase):
376 383 def perm_check(self, user):
377 384 return auth.HasRepoPermissionAll('repository.admin'
378 385 )(repo_name=self.repo.repo_name, user=user)
379 386
380 387
381 388 class RepoGroupIntegrationsView(IntegrationSettingsViewBase):
382 389 def perm_check(self, user):
383 390 return auth.HasRepoGroupPermissionAll('group.admin'
384 391 )(group_name=self.repo_group.group_name, user=user)
385 392
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,213 +1,222 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Model for integrations
24 24 """
25 25
26 26
27 27 import logging
28 28 import traceback
29 29
30 30 from pylons import tmpl_context as c
31 31 from pylons.i18n.translation import _, ungettext
32 32 from sqlalchemy import or_, and_
33 33 from sqlalchemy.sql.expression import false, true
34 34 from mako import exceptions
35 35
36 36 import rhodecode
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.caching_query import FromCache
40 40 from rhodecode.lib.utils import PartialRenderer
41 41 from rhodecode.model import BaseModel
42 42 from rhodecode.model.db import Integration, User, Repository, RepoGroup
43 43 from rhodecode.model.meta import Session
44 44 from rhodecode.integrations import integration_type_registry
45 45 from rhodecode.integrations.types.base import IntegrationTypeBase
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class IntegrationModel(BaseModel):
51 51
52 52 cls = Integration
53 53
54 54 def __get_integration(self, integration):
55 55 if isinstance(integration, Integration):
56 56 return integration
57 57 elif isinstance(integration, (int, long)):
58 58 return self.sa.query(Integration).get(integration)
59 59 else:
60 60 if integration:
61 61 raise Exception('integration must be int, long or Instance'
62 62 ' of Integration got %s' % type(integration))
63 63
64 def create(self, IntegrationType, name, enabled, scope, settings):
64 def create(self, IntegrationType, name, enabled, repo, repo_group,
65 child_repos_only, settings):
65 66 """ Create an IntegrationType integration """
66 67 integration = Integration()
67 68 integration.integration_type = IntegrationType.key
68 69 self.sa.add(integration)
69 self.update_integration(integration, name, enabled, scope, settings)
70 self.update_integration(integration, name, enabled, repo, repo_group,
71 child_repos_only, settings)
70 72 self.sa.commit()
71 73 return integration
72 74
73 def update_integration(self, integration, name, enabled, scope, settings):
74 """
75 :param scope: one of ['global', 'root_repos', <RepoGroup>. <Repository>]
76 """
77
75 def update_integration(self, integration, name, enabled, repo, repo_group,
76 child_repos_only, settings):
78 77 integration = self.__get_integration(integration)
79 78
80 integration.scope = scope
79 integration.repo = repo
80 integration.repo_group = repo_group
81 integration.child_repos_only = child_repos_only
81 82 integration.name = name
82 83 integration.enabled = enabled
83 84 integration.settings = settings
84 85
85 86 return integration
86 87
87 88 def delete(self, integration):
88 89 integration = self.__get_integration(integration)
89 90 if integration:
90 91 self.sa.delete(integration)
91 92 return True
92 93 return False
93 94
94 95 def get_integration_handler(self, integration):
95 96 TypeClass = integration_type_registry.get(integration.integration_type)
96 97 if not TypeClass:
97 98 log.error('No class could be found for integration type: {}'.format(
98 99 integration.integration_type))
99 100 return None
100 101
101 102 return TypeClass(integration.settings)
102 103
103 104 def send_event(self, integration, event):
104 105 """ Send an event to an integration """
105 106 handler = self.get_integration_handler(integration)
106 107 if handler:
107 108 handler.send_event(event)
108 109
109 110 def get_integrations(self, scope, IntegrationType=None):
110 111 """
111 112 Return integrations for a scope, which must be one of:
112 113
113 114 'all' - every integration, global/repogroup/repo
114 115 'global' - global integrations only
115 116 <Repository> instance - integrations for this repo only
116 117 <RepoGroup> instance - integrations for this repogroup only
117 118 """
118 119
119 120 if isinstance(scope, Repository):
120 121 query = self.sa.query(Integration).filter(
121 122 Integration.repo==scope)
122 123 elif isinstance(scope, RepoGroup):
123 124 query = self.sa.query(Integration).filter(
124 125 Integration.repo_group==scope)
125 126 elif scope == 'global':
126 127 # global integrations
127 128 query = self.sa.query(Integration).filter(
128 129 and_(Integration.repo_id==None, Integration.repo_group_id==None)
129 130 )
130 elif scope == 'root_repos':
131 elif scope == 'root-repos':
131 132 query = self.sa.query(Integration).filter(
132 133 and_(Integration.repo_id==None,
133 134 Integration.repo_group_id==None,
134 135 Integration.child_repos_only==True)
135 136 )
136 137 elif scope == 'all':
137 138 query = self.sa.query(Integration)
138 139 else:
139 140 raise Exception(
140 141 "invalid `scope`, must be one of: "
141 142 "['global', 'all', <Repository>, <RepoGroup>]")
142 143
143 144 if IntegrationType is not None:
144 145 query = query.filter(
145 146 Integration.integration_type==IntegrationType.key)
146 147
147 148 result = []
148 149 for integration in query.all():
149 150 IntType = integration_type_registry.get(integration.integration_type)
150 151 result.append((IntType, integration))
151 152 return result
152 153
153 154 def get_for_event(self, event, cache=False):
154 155 """
155 156 Get integrations that match an event
156 157 """
157 158 query = self.sa.query(
158 159 Integration
159 160 ).filter(
160 161 Integration.enabled==True
161 162 )
162 163
163 164 global_integrations_filter = and_(
164 165 Integration.repo_id==None,
165 166 Integration.repo_group_id==None,
166 167 Integration.child_repos_only==False,
167 168 )
168 169
169 170 if isinstance(event, events.RepoEvent):
170 171 root_repos_integrations_filter = and_(
171 172 Integration.repo_id==None,
172 173 Integration.repo_group_id==None,
173 174 Integration.child_repos_only==True,
174 175 )
175 176
176 177 clauses = [
177 178 global_integrations_filter,
178 179 ]
179 180
180 181 # repo integrations
181 182 if event.repo.repo_id: # pre create events dont have a repo_id yet
182 183 clauses.append(
183 184 Integration.repo_id==event.repo.repo_id
184 185 )
185 186
186 187 if event.repo.group:
187 188 clauses.append(
188 Integration.repo_group_id == event.repo.group.group_id
189 and_(
190 Integration.repo_group_id==event.repo.group.group_id,
191 Integration.child_repos_only==True
192 )
189 193 )
190 # repo group cascade to kids (maybe implement this sometime?)
191 # clauses.append(Integration.repo_group_id.in_(
192 # [group.group_id for group in
193 # event.repo.groups_with_parents]
194 # ))
194 # repo group cascade to kids
195 clauses.append(
196 and_(
197 Integration.repo_group_id.in_(
198 [group.group_id for group in
199 event.repo.groups_with_parents]
200 ),
201 Integration.child_repos_only==False
202 )
203 )
195 204
196 205
197 206 if not event.repo.group: # root repo
198 207 clauses.append(root_repos_integrations_filter)
199 208
200 209 query = query.filter(or_(*clauses))
201 210
202 211 if cache:
203 212 query = query.options(FromCache(
204 213 "sql_cache_short",
205 214 "get_enabled_repo_integrations_%i" % event.repo.repo_id))
206 215 else: # only global integrations
207 216 query = query.filter(global_integrations_filter)
208 217 if cache:
209 218 query = query.options(FromCache(
210 219 "sql_cache_short", "get_enabled_global_integrations"))
211 220
212 221 result = query.all()
213 222 return result No newline at end of file
@@ -1,187 +1,226 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import deform
24 24 import colander
25 25
26 26 from rhodecode.translation import _
27 27 from rhodecode.model.db import Repository, RepoGroup
28 28 from rhodecode.model.validation_schema import validators, preparers
29 29
30 30
31 31 def integration_scope_choices(permissions):
32 32 """
33 33 Return list of (value, label) choices for integration scopes depending on
34 34 the permissions
35 35 """
36 36 result = [('', _('Pick a scope:'))]
37 37 if 'hg.admin' in permissions['global']:
38 38 result.extend([
39 39 ('global', _('Global (all repositories)')),
40 ('root_repos', _('Top level repositories only')),
40 ('root-repos', _('Top level repositories only')),
41 41 ])
42 42
43 43 repo_choices = [
44 44 ('repo:%s' % repo_name, '/' + repo_name)
45 45 for repo_name, repo_perm
46 46 in permissions['repositories'].items()
47 47 if repo_perm == 'repository.admin'
48 48 ]
49 49 repogroup_choices = [
50 ('repogroup:%s' % repo_group_name, '/' + repo_group_name + ' (group)')
50 ('repogroup:%s' % repo_group_name, '/' + repo_group_name + '/ (child repos only)')
51 for repo_group_name, repo_group_perm
52 in permissions['repositories_groups'].items()
53 if repo_group_perm == 'group.admin'
54 ]
55 repogroup_recursive_choices = [
56 ('repogroup-recursive:%s' % repo_group_name, '/' + repo_group_name + '/ (recursive)')
51 57 for repo_group_name, repo_group_perm
52 58 in permissions['repositories_groups'].items()
53 59 if repo_group_perm == 'group.admin'
54 60 ]
55 61 result.extend(
56 sorted(repogroup_choices + repo_choices,
62 sorted(repogroup_recursive_choices + repogroup_choices + repo_choices,
57 63 key=lambda (choice, label): choice.split(':', 1)[1]
58 64 )
59 65 )
60 66 return result
61 67
62 68
63 69 @colander.deferred
64 70 def deferred_integration_scopes_validator(node, kw):
65 71 perms = kw.get('permissions')
66 72 def _scope_validator(_node, scope):
67 73 is_super_admin = 'hg.admin' in perms['global']
68 74
69 if scope in ('global', 'root_repos'):
75 if scope.get('repo'):
76 if (is_super_admin or perms['repositories'].get(
77 scope['repo'].repo_name) == 'repository.admin'):
78 return True
79 msg = _('Only repo admins can create integrations')
80 raise colander.Invalid(_node, msg)
81 elif scope.get('repo_group'):
82 if (is_super_admin or perms['repositories_groups'].get(
83 scope['repo_group'].group_name) == 'group.admin'):
84 return True
85
86 msg = _('Only repogroup admins can create integrations')
87 raise colander.Invalid(_node, msg)
88 else:
70 89 if is_super_admin:
71 90 return True
72 91 msg = _('Only superadmins can create global integrations')
73 92 raise colander.Invalid(_node, msg)
74 elif isinstance(scope, Repository):
75 if (is_super_admin or perms['repositories'].get(
76 scope.repo_name) == 'repository.admin'):
77 return True
78 msg = _('Only repo admins can create integrations')
79 raise colander.Invalid(_node, msg)
80 elif isinstance(scope, RepoGroup):
81 if (is_super_admin or perms['repositories_groups'].get(
82 scope.group_name) == 'group.admin'):
83 return True
84
85 msg = _('Only repogroup admins can create integrations')
86 raise colander.Invalid(_node, msg)
87
88 msg = _('Invalid integration scope: %s' % scope)
89 raise colander.Invalid(node, msg)
90 93
91 94 return _scope_validator
92 95
93 96
94 97 @colander.deferred
95 98 def deferred_integration_scopes_widget(node, kw):
96 99 if kw.get('no_scope'):
97 100 return deform.widget.TextInputWidget(readonly=True)
98 101
99 102 choices = integration_scope_choices(kw.get('permissions'))
100 103 widget = deform.widget.Select2Widget(values=choices)
101 104 return widget
102 105
103 class IntegrationScope(colander.SchemaType):
106
107 class IntegrationScopeType(colander.SchemaType):
104 108 def serialize(self, node, appstruct):
105 109 if appstruct is colander.null:
106 110 return colander.null
107 111
108 if isinstance(appstruct, Repository):
109 return 'repo:%s' % appstruct.repo_name
110 elif isinstance(appstruct, RepoGroup):
111 return 'repogroup:%s' % appstruct.group_name
112 elif appstruct in ('global', 'root_repos'):
113 return appstruct
112 if appstruct.get('repo'):
113 return 'repo:%s' % appstruct['repo'].repo_name
114 elif appstruct.get('repo_group'):
115 if appstruct.get('child_repos_only'):
116 return 'repogroup:%s' % appstruct['repo_group'].group_name
117 else:
118 return 'repogroup-recursive:%s' % (
119 appstruct['repo_group'].group_name)
120 else:
121 if appstruct.get('child_repos_only'):
122 return 'root-repos'
123 else:
124 return 'global'
125
114 126 raise colander.Invalid(node, '%r is not a valid scope' % appstruct)
115 127
116 128 def deserialize(self, node, cstruct):
117 129 if cstruct is colander.null:
118 130 return colander.null
119 131
120 132 if cstruct.startswith('repo:'):
121 133 repo = Repository.get_by_repo_name(cstruct.split(':')[1])
122 134 if repo:
123 return repo
135 return {
136 'repo': repo,
137 'repo_group': None,
138 'child_repos_only': None,
139 }
140 elif cstruct.startswith('repogroup-recursive:'):
141 repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1])
142 if repo_group:
143 return {
144 'repo': None,
145 'repo_group': repo_group,
146 'child_repos_only': False
147 }
124 148 elif cstruct.startswith('repogroup:'):
125 149 repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1])
126 150 if repo_group:
127 return repo_group
128 elif cstruct in ('global', 'root_repos'):
129 return cstruct
151 return {
152 'repo': None,
153 'repo_group': repo_group,
154 'child_repos_only': True
155 }
156 elif cstruct == 'global':
157 return {
158 'repo': None,
159 'repo_group': None,
160 'child_repos_only': False
161 }
162 elif cstruct == 'root-repos':
163 return {
164 'repo': None,
165 'repo_group': None,
166 'child_repos_only': True
167 }
130 168
131 169 raise colander.Invalid(node, '%r is not a valid scope' % cstruct)
132 170
171
133 172 class IntegrationOptionsSchemaBase(colander.MappingSchema):
134 173
135 174 name = colander.SchemaNode(
136 175 colander.String(),
137 176 description=_('Short name for this integration.'),
138 177 missing=colander.required,
139 178 title=_('Integration name'),
140 179 )
141 180
142 181 scope = colander.SchemaNode(
143 IntegrationScope(),
182 IntegrationScopeType(),
144 183 description=_(
145 'Scope of the integration. Group scope means the integration '
146 ' runs on all child repos of that group.'),
184 'Scope of the integration. Recursive means the integration '
185 ' runs on all repos of that group and children recursively.'),
147 186 title=_('Integration scope'),
148 187 validator=deferred_integration_scopes_validator,
149 188 widget=deferred_integration_scopes_widget,
150 189 missing=colander.required,
151 190 )
152 191
153 192 enabled = colander.SchemaNode(
154 193 colander.Bool(),
155 194 default=True,
156 195 description=_('Enable or disable this integration.'),
157 196 missing=False,
158 197 title=_('Enabled'),
159 198 )
160 199
161 200
162 201
163 202 def make_integration_schema(IntegrationType, settings=None):
164 203 """
165 204 Return a colander schema for an integration type
166 205
167 206 :param IntegrationType: the integration type class
168 207 :param settings: existing integration settings dict (optional)
169 208 """
170 209
171 210 settings = settings or {}
172 211 settings_schema = IntegrationType(settings=settings).settings_schema()
173 212
174 213 class IntegrationSchema(colander.Schema):
175 214 options = IntegrationOptionsSchemaBase()
176 215
177 216 schema = IntegrationSchema()
178 217 schema['options'].title = _('General integration options')
179 218
180 219 settings_schema.name = 'settings'
181 220 settings_schema.title = _('{integration_type} settings').format(
182 221 integration_type=IntegrationType.display_name)
183 222 schema.add(settings_schema)
184 223
185 224 return schema
186 225
187 226
@@ -1,249 +1,252 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="base.html"/>
3 3
4 4 <%def name="breadcrumbs_links()">
5 5 %if c.repo:
6 6 ${h.link_to('Settings',h.url('edit_repo', repo_name=c.repo.repo_name))}
7 7 %elif c.repo_group:
8 8 ${h.link_to(_('Admin'),h.url('admin_home'))}
9 9 &raquo;
10 10 ${h.link_to(_('Repository Groups'),h.url('repo_groups'))}
11 11 &raquo;
12 12 ${h.link_to(c.repo_group.group_name,h.url('edit_repo_group', group_name=c.repo_group.group_name))}
13 13 %else:
14 14 ${h.link_to(_('Admin'),h.url('admin_home'))}
15 15 &raquo;
16 16 ${h.link_to(_('Settings'),h.url('admin_settings'))}
17 17 %endif
18 18 %if current_IntegrationType:
19 19 &raquo;
20 20 %if c.repo:
21 21 ${h.link_to(_('Integrations'),
22 22 request.route_url(route_name='repo_integrations_home',
23 23 repo_name=c.repo.repo_name))}
24 24 %elif c.repo_group:
25 25 ${h.link_to(_('Integrations'),
26 26 request.route_url(route_name='repo_group_integrations_home',
27 27 repo_group_name=c.repo_group.group_name))}
28 28 %else:
29 29 ${h.link_to(_('Integrations'),
30 30 request.route_url(route_name='global_integrations_home'))}
31 31 %endif
32 32 &raquo;
33 33 ${current_IntegrationType.display_name}
34 34 %else:
35 35 &raquo;
36 36 ${_('Integrations')}
37 37 %endif
38 38 </%def>
39 39
40 40 <div class="panel panel-default">
41 41 <div class="panel-heading">
42 42 <h3 class="panel-title">
43 43 %if c.repo:
44 44 ${_('Current Integrations for Repository: {repo_name}').format(repo_name=c.repo.repo_name)}
45 45 %elif c.repo_group:
46 46 ${_('Current Integrations for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)}
47 47 %else:
48 48 ${_('Current Integrations')}
49 49 %endif
50 50 </h3>
51 51 </div>
52 52 <div class="panel-body">
53 53 <%
54 54 if c.repo:
55 55 home_url = request.route_path('repo_integrations_home',
56 56 repo_name=c.repo.repo_name)
57 57 elif c.repo_group:
58 58 home_url = request.route_path('repo_group_integrations_home',
59 59 repo_group_name=c.repo_group.group_name)
60 60 else:
61 61 home_url = request.route_path('global_integrations_home')
62 62 %>
63 63
64 64 <a href="${home_url}" class="btn ${not current_IntegrationType and 'btn-primary' or ''}">${_('All')}</a>
65 65
66 66 %for integration_key, IntegrationType in available_integrations.items():
67 67 <%
68 68 if c.repo:
69 69 list_url = request.route_path('repo_integrations_list',
70 70 repo_name=c.repo.repo_name,
71 71 integration=integration_key)
72 72 elif c.repo_group:
73 73 list_url = request.route_path('repo_group_integrations_list',
74 74 repo_group_name=c.repo_group.group_name,
75 75 integration=integration_key)
76 76 else:
77 77 list_url = request.route_path('global_integrations_list',
78 78 integration=integration_key)
79 79 %>
80 80 <a href="${list_url}"
81 81 class="btn ${current_IntegrationType and integration_key == current_IntegrationType.key and 'btn-primary' or ''}">
82 82 ${IntegrationType.display_name}
83 83 </a>
84 84 %endfor
85 85
86 86 <%
87 87 if c.repo:
88 88 create_url = h.route_path('repo_integrations_new', repo_name=c.repo.repo_name)
89 89 elif c.repo_group:
90 90 create_url = h.route_path('repo_group_integrations_new', repo_group_name=c.repo_group.group_name)
91 91 else:
92 92 create_url = h.route_path('global_integrations_new')
93 93 %>
94 94 <p class="pull-right">
95 95 <a href="${create_url}" class="btn btn-small btn-success">${_(u'Create new integration')}</a>
96 96 </p>
97 97
98 98 <table class="rctable integrations">
99 99 <thead>
100 100 <tr>
101 101 <th><a href="?sort=enabled:${rev_sort_dir}">${_('Enabled')}</a></th>
102 102 <th><a href="?sort=name:${rev_sort_dir}">${_('Name')}</a></th>
103 103 <th colspan="2"><a href="?sort=integration_type:${rev_sort_dir}">${_('Type')}</a></th>
104 104 <th><a href="?sort=scope:${rev_sort_dir}">${_('Scope')}</a></th>
105 105 <th>${_('Actions')}</th>
106 106 <th></th>
107 107 </tr>
108 108 </thead>
109 109 <tbody>
110 110 %if not integrations_list:
111 111 <tr>
112 112 <td colspan="7">
113 113 <% integration_type = current_IntegrationType and current_IntegrationType.display_name or '' %>
114 114 %if c.repo:
115 115 ${_('No {type} integrations for repo {repo} exist yet.').format(type=integration_type, repo=c.repo.repo_name)}
116 116 %elif c.repo_group:
117 117 ${_('No {type} integrations for repogroup {repogroup} exist yet.').format(type=integration_type, repogroup=c.repo_group.group_name)}
118 118 %else:
119 119 ${_('No {type} integrations exist yet.').format(type=integration_type)}
120 120 %endif
121 121
122 122 %if current_IntegrationType:
123 123 <%
124 124 if c.repo:
125 125 create_url = h.route_path('repo_integrations_create', repo_name=c.repo.repo_name, integration=current_IntegrationType.key)
126 126 elif c.repo_group:
127 127 create_url = h.route_path('repo_group_integrations_create', repo_group_name=c.repo_group.group_name, integration=current_IntegrationType.key)
128 128 else:
129 129 create_url = h.route_path('global_integrations_create', integration=current_IntegrationType.key)
130 130 %>
131 131 %endif
132 132
133 133 <a href="${create_url}">${_(u'Create one')}</a>
134 134 </td>
135 135 </tr>
136 136 %endif
137 137 %for IntegrationType, integration in integrations_list:
138 138 <tr id="integration_${integration.integration_id}">
139 139 <td class="td-enabled">
140 140 %if integration.enabled:
141 141 <div class="flag_status approved pull-left"></div>
142 142 %else:
143 143 <div class="flag_status rejected pull-left"></div>
144 144 %endif
145 145 </td>
146 146 <td class="td-description">
147 147 ${integration.name}
148 148 </td>
149 149 <td class="td-icon">
150 150 %if integration.integration_type in available_integrations:
151 151 <div class="integration-icon">
152 152 ${available_integrations[integration.integration_type].icon|n}
153 153 </div>
154 154 %else:
155 155 ?
156 156 %endif
157 157 </td>
158 158 <td class="td-type">
159 159 ${integration.integration_type}
160 160 </td>
161 161 <td class="td-scope">
162 162 %if integration.repo:
163 163 <a href="${h.url('summary_home', repo_name=integration.repo.repo_name)}">
164 164 ${_('repo')}:${integration.repo.repo_name}
165 165 </a>
166 166 %elif integration.repo_group:
167 167 <a href="${h.url('repo_group_home', group_name=integration.repo_group.group_name)}">
168 168 ${_('repogroup')}:${integration.repo_group.group_name}
169 %if integration.child_repos_only:
170 ${_('child repos only')}
171 %else:
172 ${_('cascade to all')}
173 %endif
169 174 </a>
170 175 %else:
171 %if integration.scope == 'root_repos':
176 %if integration.child_repos_only:
172 177 ${_('top level repos only')}
173 %elif integration.scope == 'global':
178 %else:
174 179 ${_('global')}
175 %else:
176 ${_('unknown scope')}: ${integration.scope}
177 180 %endif
178 181 </td>
179 182 %endif
180 183 <td class="td-action">
181 184 %if not IntegrationType:
182 185 ${_('unknown integration')}
183 186 %else:
184 187 <%
185 188 if c.repo:
186 189 edit_url = request.route_path('repo_integrations_edit',
187 190 repo_name=c.repo.repo_name,
188 191 integration=integration.integration_type,
189 192 integration_id=integration.integration_id)
190 193 elif c.repo_group:
191 194 edit_url = request.route_path('repo_group_integrations_edit',
192 195 repo_group_name=c.repo_group.group_name,
193 196 integration=integration.integration_type,
194 197 integration_id=integration.integration_id)
195 198 else:
196 199 edit_url = request.route_path('global_integrations_edit',
197 200 integration=integration.integration_type,
198 201 integration_id=integration.integration_id)
199 202 %>
200 203 <div class="grid_edit">
201 204 <a href="${edit_url}">${_('Edit')}</a>
202 205 </div>
203 206 <div class="grid_delete">
204 207 <a href="${edit_url}"
205 208 class="btn btn-link btn-danger delete_integration_entry"
206 209 data-desc="${integration.name}"
207 210 data-uid="${integration.integration_id}">
208 211 ${_('Delete')}
209 212 </a>
210 213 </div>
211 214 %endif
212 215 </td>
213 216 </tr>
214 217 %endfor
215 218 <tr id="last-row"></tr>
216 219 </tbody>
217 220 </table>
218 221 <div class="integrations-paginator">
219 222 <div class="pagination-wh pagination-left">
220 223 ${integrations_list.pager('$link_previous ~2~ $link_next')}
221 224 </div>
222 225 </div>
223 226 </div>
224 227 </div>
225 228 <script type="text/javascript">
226 229 var delete_integration = function(entry) {
227 230 if (confirm("Confirm to remove this integration: "+$(entry).data('desc'))) {
228 231 var request = $.ajax({
229 232 type: "POST",
230 233 url: $(entry).attr('href'),
231 234 data: {
232 235 'delete': 'delete',
233 236 'csrf_token': CSRF_TOKEN
234 237 },
235 238 success: function(){
236 239 location.reload();
237 240 },
238 241 error: function(data, textStatus, errorThrown){
239 242 alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url));
240 243 }
241 244 });
242 245 };
243 246 }
244 247
245 248 $('.delete_integration_entry').on('click', function(e){
246 249 e.preventDefault();
247 250 delete_integration(this);
248 251 });
249 252 </script> No newline at end of file
@@ -1,262 +1,264 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 from webob.exc import HTTPNotFound
24 24
25 25 import rhodecode
26 26 from rhodecode.model.db import Integration
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.tests import assert_session_flash, url, TEST_USER_ADMIN_LOGIN
29 29 from rhodecode.tests.utils import AssertResponse
30 30 from rhodecode.integrations import integration_type_registry
31 31 from rhodecode.config.routing import ADMIN_PREFIX
32 32
33 33
34 34 @pytest.mark.usefixtures('app', 'autologin_user')
35 35 class TestIntegrationsView(object):
36 36 pass
37 37
38 38
39 39 class TestGlobalIntegrationsView(TestIntegrationsView):
40 40 def test_index_no_integrations(self, app):
41 41 url = ADMIN_PREFIX + '/integrations'
42 42 response = app.get(url)
43 43
44 44 assert response.status_code == 200
45 45 assert 'exist yet' in response.body
46 46
47 47 def test_index_with_integrations(self, app, global_integration_stub):
48 48 url = ADMIN_PREFIX + '/integrations'
49 49 response = app.get(url)
50 50
51 51 assert response.status_code == 200
52 52 assert 'exist yet' not in response.body
53 53 assert global_integration_stub.name in response.body
54 54
55 55 def test_new_integration_page(self, app):
56 56 url = ADMIN_PREFIX + '/integrations/new'
57 57
58 58 response = app.get(url)
59 59
60 60 assert response.status_code == 200
61 61
62 62 for integration_key in integration_type_registry:
63 63 nurl = (ADMIN_PREFIX + '/integrations/{integration}/new').format(
64 64 integration=integration_key)
65 65 assert nurl in response.body
66 66
67 67 @pytest.mark.parametrize(
68 68 'IntegrationType', integration_type_registry.values())
69 69 def test_get_create_integration_page(self, app, IntegrationType):
70 70 url = ADMIN_PREFIX + '/integrations/{integration_key}/new'.format(
71 71 integration_key=IntegrationType.key)
72 72
73 73 response = app.get(url)
74 74
75 75 assert response.status_code == 200
76 76 assert IntegrationType.display_name in response.body
77 77
78 78 def test_post_integration_page(self, app, StubIntegrationType, csrf_token,
79 79 test_repo_group, backend_random):
80 80 url = ADMIN_PREFIX + '/integrations/{integration_key}/new'.format(
81 81 integration_key=StubIntegrationType.key)
82 82
83 83 _post_integration_test_helper(app, url, csrf_token, admin_view=True,
84 84 repo=backend_random.repo, repo_group=test_repo_group)
85 85
86 86
87 87 class TestRepoGroupIntegrationsView(TestIntegrationsView):
88 88 def test_index_no_integrations(self, app, test_repo_group):
89 89 url = '/{repo_group_name}/settings/integrations'.format(
90 90 repo_group_name=test_repo_group.group_name)
91 91 response = app.get(url)
92 92
93 93 assert response.status_code == 200
94 94 assert 'exist yet' in response.body
95 95
96 96 def test_index_with_integrations(self, app, test_repo_group,
97 97 repogroup_integration_stub):
98 98 url = '/{repo_group_name}/settings/integrations'.format(
99 99 repo_group_name=test_repo_group.group_name)
100 100
101 101 stub_name = repogroup_integration_stub.name
102 102 response = app.get(url)
103 103
104 104 assert response.status_code == 200
105 105 assert 'exist yet' not in response.body
106 106 assert stub_name in response.body
107 107
108 108 def test_new_integration_page(self, app, test_repo_group):
109 109 repo_group_name = test_repo_group.group_name
110 110 url = '/{repo_group_name}/settings/integrations/new'.format(
111 111 repo_group_name=test_repo_group.group_name)
112 112
113 113 response = app.get(url)
114 114
115 115 assert response.status_code == 200
116 116
117 117 for integration_key in integration_type_registry:
118 118 nurl = ('/{repo_group_name}/settings/integrations'
119 119 '/{integration}/new').format(
120 120 repo_group_name=repo_group_name,
121 121 integration=integration_key)
122 122
123 123 assert nurl in response.body
124 124
125 125 @pytest.mark.parametrize(
126 126 'IntegrationType', integration_type_registry.values())
127 127 def test_get_create_integration_page(self, app, test_repo_group,
128 128 IntegrationType):
129 129 repo_group_name = test_repo_group.group_name
130 130 url = ('/{repo_group_name}/settings/integrations/{integration_key}/new'
131 131 ).format(repo_group_name=repo_group_name,
132 132 integration_key=IntegrationType.key)
133 133
134 134 response = app.get(url)
135 135
136 136 assert response.status_code == 200
137 137 assert IntegrationType.display_name in response.body
138 138
139 139 def test_post_integration_page(self, app, test_repo_group, backend_random,
140 140 StubIntegrationType, csrf_token):
141 141 repo_group_name = test_repo_group.group_name
142 142 url = ('/{repo_group_name}/settings/integrations/{integration_key}/new'
143 143 ).format(repo_group_name=repo_group_name,
144 144 integration_key=StubIntegrationType.key)
145 145
146 146 _post_integration_test_helper(app, url, csrf_token, admin_view=False,
147 147 repo=backend_random.repo, repo_group=test_repo_group)
148 148
149 149
150 150 class TestRepoIntegrationsView(TestIntegrationsView):
151 151 def test_index_no_integrations(self, app, backend_random):
152 152 url = '/{repo_name}/settings/integrations'.format(
153 153 repo_name=backend_random.repo.repo_name)
154 154 response = app.get(url)
155 155
156 156 assert response.status_code == 200
157 157 assert 'exist yet' in response.body
158 158
159 159 def test_index_with_integrations(self, app, repo_integration_stub):
160 160 url = '/{repo_name}/settings/integrations'.format(
161 161 repo_name=repo_integration_stub.repo.repo_name)
162 162 stub_name = repo_integration_stub.name
163 163
164 164 response = app.get(url)
165 165
166 166 assert response.status_code == 200
167 167 assert stub_name in response.body
168 168 assert 'exist yet' not in response.body
169 169
170 170 def test_new_integration_page(self, app, backend_random):
171 171 repo_name = backend_random.repo.repo_name
172 172 url = '/{repo_name}/settings/integrations/new'.format(
173 173 repo_name=repo_name)
174 174
175 175 response = app.get(url)
176 176
177 177 assert response.status_code == 200
178 178
179 179 for integration_key in integration_type_registry:
180 180 nurl = ('/{repo_name}/settings/integrations'
181 181 '/{integration}/new').format(
182 182 repo_name=repo_name,
183 183 integration=integration_key)
184 184
185 185 assert nurl in response.body
186 186
187 187 @pytest.mark.parametrize(
188 188 'IntegrationType', integration_type_registry.values())
189 189 def test_get_create_integration_page(self, app, backend_random,
190 190 IntegrationType):
191 191 repo_name = backend_random.repo.repo_name
192 192 url = '/{repo_name}/settings/integrations/{integration_key}/new'.format(
193 193 repo_name=repo_name, integration_key=IntegrationType.key)
194 194
195 195 response = app.get(url)
196 196
197 197 assert response.status_code == 200
198 198 assert IntegrationType.display_name in response.body
199 199
200 200 def test_post_integration_page(self, app, backend_random, test_repo_group,
201 201 StubIntegrationType, csrf_token):
202 202 repo_name = backend_random.repo.repo_name
203 203 url = '/{repo_name}/settings/integrations/{integration_key}/new'.format(
204 204 repo_name=repo_name, integration_key=StubIntegrationType.key)
205 205
206 206 _post_integration_test_helper(app, url, csrf_token, admin_view=False,
207 207 repo=backend_random.repo, repo_group=test_repo_group)
208 208
209 209
210 210 def _post_integration_test_helper(app, url, csrf_token, repo, repo_group,
211 211 admin_view):
212 212 """
213 213 Posts form data to create integration at the url given then deletes it and
214 214 checks if the redirect url is correct.
215 215 """
216 216
217 217 app.post(url, params={}, status=403) # missing csrf check
218 218 response = app.post(url, params={'csrf_token': csrf_token})
219 219 assert response.status_code == 200
220 220 assert 'Errors exist' in response.body
221 221
222 222 scopes_destinations = [
223 223 ('global',
224 224 ADMIN_PREFIX + '/integrations'),
225 ('root_repos',
225 ('root-repos',
226 226 ADMIN_PREFIX + '/integrations'),
227 227 ('repo:%s' % repo.repo_name,
228 228 '/%s/settings/integrations' % repo.repo_name),
229 229 ('repogroup:%s' % repo_group.group_name,
230 230 '/%s/settings/integrations' % repo_group.group_name),
231 ('repogroup-recursive:%s' % repo_group.group_name,
232 '/%s/settings/integrations' % repo_group.group_name),
231 233 ]
232 234
233 235 for scope, destination in scopes_destinations:
234 236 if admin_view:
235 237 destination = ADMIN_PREFIX + '/integrations'
236 238
237 239 form_data = [
238 240 ('csrf_token', csrf_token),
239 241 ('__start__', 'options:mapping'),
240 242 ('name', 'test integration'),
241 243 ('scope', scope),
242 244 ('enabled', 'true'),
243 245 ('__end__', 'options:mapping'),
244 246 ('__start__', 'settings:mapping'),
245 247 ('test_int_field', '34'),
246 248 ('test_string_field', ''), # empty value on purpose as it's required
247 249 ('__end__', 'settings:mapping'),
248 250 ]
249 251 errors_response = app.post(url, form_data)
250 252 assert 'Errors exist' in errors_response.body
251 253
252 254 form_data[-2] = ('test_string_field', 'data!')
253 255 assert Session().query(Integration).count() == 0
254 256 created_response = app.post(url, form_data)
255 257 assert Session().query(Integration).count() == 1
256 258
257 259 delete_response = app.post(
258 260 created_response.location,
259 261 params={'csrf_token': csrf_token, 'delete': 'delete'})
260 262
261 263 assert Session().query(Integration).count() == 0
262 264 assert delete_response.location.endswith(destination)
@@ -1,192 +1,222 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import pytest
23 23
24 24 from rhodecode import events
25 25 from rhodecode.tests.fixture import Fixture
26 26 from rhodecode.model.db import Session, Integration
27 27 from rhodecode.model.integration import IntegrationModel
28 28 from rhodecode.integrations.types.base import IntegrationTypeBase
29 29
30 30
31 31 class TestDeleteScopesDeletesIntegrations(object):
32 32 def test_delete_repo_with_integration_deletes_integration(self,
33 33 repo_integration_stub):
34 34 Session().delete(repo_integration_stub.repo)
35 35 Session().commit()
36 36 Session().expire_all()
37 37 integration = Integration.get(repo_integration_stub.integration_id)
38 38 assert integration is None
39 39
40 40
41 41 def test_delete_repo_group_with_integration_deletes_integration(self,
42 42 repogroup_integration_stub):
43 43 Session().delete(repogroup_integration_stub.repo_group)
44 44 Session().commit()
45 45 Session().expire_all()
46 46 integration = Integration.get(repogroup_integration_stub.integration_id)
47 47 assert integration is None
48 48
49 49
50 50 @pytest.fixture
51 51 def integration_repos(request, StubIntegrationType, stub_integration_settings):
52 52 """
53 53 Create repositories and integrations for testing, and destroy them after
54
55 Structure:
56 root_repo
57 parent_group/
58 parent_repo
59 child_group/
60 child_repo
61 other_group/
62 other_repo
54 63 """
55 64 fixture = Fixture()
56 65
57 repo_group_1_id = 'int_test_repo_group_1_%s' % time.time()
58 repo_group_1 = fixture.create_repo_group(repo_group_1_id)
59 repo_group_2_id = 'int_test_repo_group_2_%s' % time.time()
60 repo_group_2 = fixture.create_repo_group(repo_group_2_id)
66
67 parent_group_id = 'int_test_parent_group_%s' % time.time()
68 parent_group = fixture.create_repo_group(parent_group_id)
69
70 other_group_id = 'int_test_other_group_%s' % time.time()
71 other_group = fixture.create_repo_group(other_group_id)
61 72
62 repo_1_id = 'int_test_repo_1_%s' % time.time()
63 repo_1 = fixture.create_repo(repo_1_id, repo_group=repo_group_1)
64 repo_2_id = 'int_test_repo_2_%s' % time.time()
65 repo_2 = fixture.create_repo(repo_2_id, repo_group=repo_group_2)
73 child_group_id = (
74 parent_group_id + '/' + 'int_test_child_group_%s' % time.time())
75 child_group = fixture.create_repo_group(child_group_id)
76
77 parent_repo_id = 'int_test_parent_repo_%s' % time.time()
78 parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group)
79
80 child_repo_id = 'int_test_child_repo_%s' % time.time()
81 child_repo = fixture.create_repo(child_repo_id, repo_group=child_group)
82
83 other_repo_id = 'int_test_other_repo_%s' % time.time()
84 other_repo = fixture.create_repo(other_repo_id, repo_group=other_group)
66 85
67 86 root_repo_id = 'int_test_repo_root_%s' % time.time()
68 87 root_repo = fixture.create_repo(root_repo_id)
69 88
70 integration_global = IntegrationModel().create(
71 StubIntegrationType, settings=stub_integration_settings,
72 enabled=True, name='test global integration', scope='global')
73 integration_root_repos = IntegrationModel().create(
74 StubIntegrationType, settings=stub_integration_settings,
75 enabled=True, name='test root repos integration', scope='root_repos')
76 integration_repo_1 = IntegrationModel().create(
77 StubIntegrationType, settings=stub_integration_settings,
78 enabled=True, name='test repo 1 integration', scope=repo_1)
79 integration_repo_group_1 = IntegrationModel().create(
80 StubIntegrationType, settings=stub_integration_settings,
81 enabled=True, name='test repo group 1 integration', scope=repo_group_1)
82 integration_repo_2 = IntegrationModel().create(
83 StubIntegrationType, settings=stub_integration_settings,
84 enabled=True, name='test repo 2 integration', scope=repo_2)
85 integration_repo_group_2 = IntegrationModel().create(
86 StubIntegrationType, settings=stub_integration_settings,
87 enabled=True, name='test repo group 2 integration', scope=repo_group_2)
89 integrations = {}
90 for name, repo, repo_group, child_repos_only in [
91 ('global', None, None, None),
92 ('root_repos', None, None, True),
93 ('parent_repo', parent_repo, None, None),
94 ('child_repo', child_repo, None, None),
95 ('other_repo', other_repo, None, None),
96 ('root_repo', root_repo, None, None),
97 ('parent_group', None, parent_group, True),
98 ('parent_group_recursive', None, parent_group, False),
99 ('child_group', None, child_group, True),
100 ('child_group_recursive', None, child_group, False),
101 ('other_group', None, other_group, True),
102 ('other_group_recursive', None, other_group, False),
103 ]:
104 integrations[name] = IntegrationModel().create(
105 StubIntegrationType, settings=stub_integration_settings,
106 enabled=True, name='test %s integration' % name,
107 repo=repo, repo_group=repo_group, child_repos_only=child_repos_only)
88 108
89 109 Session().commit()
90 110
91 111 def _cleanup():
92 Session().delete(integration_global)
93 Session().delete(integration_root_repos)
94 Session().delete(integration_repo_1)
95 Session().delete(integration_repo_group_1)
96 Session().delete(integration_repo_2)
97 Session().delete(integration_repo_group_2)
112 for integration in integrations.values():
113 Session.delete(integration)
114
98 115 fixture.destroy_repo(root_repo)
99 fixture.destroy_repo(repo_1)
100 fixture.destroy_repo(repo_2)
101 fixture.destroy_repo_group(repo_group_1)
102 fixture.destroy_repo_group(repo_group_2)
116 fixture.destroy_repo(child_repo)
117 fixture.destroy_repo(parent_repo)
118 fixture.destroy_repo(other_repo)
119 fixture.destroy_repo_group(child_group)
120 fixture.destroy_repo_group(parent_group)
121 fixture.destroy_repo_group(other_group)
103 122
104 123 request.addfinalizer(_cleanup)
105 124
106 125 return {
126 'integrations': integrations,
107 127 'repos': {
108 'repo_1': repo_1,
109 'repo_2': repo_2,
110 128 'root_repo': root_repo,
111 },
112 'repo_groups': {
113 'repo_group_1': repo_group_1,
114 'repo_group_2': repo_group_2,
115 },
116 'integrations': {
117 'global': integration_global,
118 'root_repos': integration_root_repos,
119 'repo_1': integration_repo_1,
120 'repo_2': integration_repo_2,
121 'repo_group_1': integration_repo_group_1,
122 'repo_group_2': integration_repo_group_2,
129 'other_repo': other_repo,
130 'parent_repo': parent_repo,
131 'child_repo': child_repo,
123 132 }
124 133 }
125 134
126 135
127 136 def test_enabled_integration_repo_scopes(integration_repos):
128 137 integrations = integration_repos['integrations']
129 138 repos = integration_repos['repos']
130 139
131 140 triggered_integrations = IntegrationModel().get_for_event(
132 141 events.RepoEvent(repos['root_repo']))
133 142
134 143 assert triggered_integrations == [
135 144 integrations['global'],
136 integrations['root_repos']
145 integrations['root_repos'],
146 integrations['root_repo'],
147 ]
148
149
150 triggered_integrations = IntegrationModel().get_for_event(
151 events.RepoEvent(repos['other_repo']))
152
153 assert triggered_integrations == [
154 integrations['global'],
155 integrations['other_repo'],
156 integrations['other_group'],
157 integrations['other_group_recursive'],
137 158 ]
138 159
139 160
140 161 triggered_integrations = IntegrationModel().get_for_event(
141 events.RepoEvent(repos['repo_1']))
162 events.RepoEvent(repos['parent_repo']))
142 163
143 164 assert triggered_integrations == [
144 165 integrations['global'],
145 integrations['repo_1'],
146 integrations['repo_group_1']
166 integrations['parent_repo'],
167 integrations['parent_group'],
168 integrations['parent_group_recursive'],
147 169 ]
148 170
149
150 171 triggered_integrations = IntegrationModel().get_for_event(
151 events.RepoEvent(repos['repo_2']))
172 events.RepoEvent(repos['child_repo']))
152 173
153 174 assert triggered_integrations == [
154 175 integrations['global'],
155 integrations['repo_2'],
156 integrations['repo_group_2'],
176 integrations['child_repo'],
177 integrations['parent_group_recursive'],
178 integrations['child_group'],
179 integrations['child_group_recursive'],
157 180 ]
158 181
159 182
160 183 def test_disabled_integration_repo_scopes(integration_repos):
161 184 integrations = integration_repos['integrations']
162 185 repos = integration_repos['repos']
163 186
164 187 for integration in integrations.values():
165 188 integration.enabled = False
166 189 Session().commit()
167 190
168 191 triggered_integrations = IntegrationModel().get_for_event(
169 192 events.RepoEvent(repos['root_repo']))
170 193
171 194 assert triggered_integrations == []
172 195
173 196
174 197 triggered_integrations = IntegrationModel().get_for_event(
175 events.RepoEvent(repos['repo_1']))
198 events.RepoEvent(repos['parent_repo']))
176 199
177 200 assert triggered_integrations == []
178 201
179 202
180 203 triggered_integrations = IntegrationModel().get_for_event(
181 events.RepoEvent(repos['repo_2']))
204 events.RepoEvent(repos['child_repo']))
182 205
183 206 assert triggered_integrations == []
184 207
185 208
209 triggered_integrations = IntegrationModel().get_for_event(
210 events.RepoEvent(repos['other_repo']))
211
212 assert triggered_integrations == []
213
214
215
186 216 def test_enabled_non_repo_integrations(integration_repos):
187 217 integrations = integration_repos['integrations']
188 218
189 219 triggered_integrations = IntegrationModel().get_for_event(
190 220 events.UserPreCreate({}))
191 221
192 222 assert triggered_integrations == [integrations['global']]
@@ -1,120 +1,171 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 import pytest
23 23
24 24 from rhodecode.model import validation_schema
25 25
26 26 from rhodecode.integrations import integration_type_registry
27 27 from rhodecode.integrations.types.base import IntegrationTypeBase
28 28 from rhodecode.model.validation_schema.schemas.integration_schema import (
29 29 make_integration_schema
30 30 )
31 31
32 32
33 33 @pytest.mark.usefixtures('app', 'autologin_user')
34 34 class TestIntegrationSchema(object):
35 35
36 36 def test_deserialize_integration_schema_perms(self, backend_random,
37 37 test_repo_group,
38 38 StubIntegrationType):
39 39
40 40 repo = backend_random.repo
41 41 repo_group = test_repo_group
42 42
43 43
44 44 empty_perms_dict = {
45 45 'global': [],
46 46 'repositories': {},
47 47 'repositories_groups': {},
48 48 }
49 49
50 perms_tests = {
51 ('repo:%s' % repo.repo_name, repo): [
52 ({}, False),
53 ({'global': ['hg.admin']}, True),
54 ({'global': []}, False),
55 ({'repositories': {repo.repo_name: 'repository.admin'}}, True),
56 ({'repositories': {repo.repo_name: 'repository.read'}}, False),
57 ({'repositories': {repo.repo_name: 'repository.write'}}, False),
58 ({'repositories': {repo.repo_name: 'repository.none'}}, False),
59 ],
60 ('repogroup:%s' % repo_group.group_name, repo_group): [
61 ({}, False),
62 ({'global': ['hg.admin']}, True),
63 ({'global': []}, False),
64 ({'repositories_groups':
65 {repo_group.group_name: 'group.admin'}}, True),
66 ({'repositories_groups':
67 {repo_group.group_name: 'group.read'}}, False),
68 ({'repositories_groups':
69 {repo_group.group_name: 'group.write'}}, False),
70 ({'repositories_groups':
71 {repo_group.group_name: 'group.none'}}, False),
72 ],
73 ('global', 'global'): [
74 ({}, False),
75 ({'global': ['hg.admin']}, True),
76 ({'global': []}, False),
77 ],
78 ('root_repos', 'root_repos'): [
79 ({}, False),
80 ({'global': ['hg.admin']}, True),
81 ({'global': []}, False),
82 ],
83 }
50 perms_tests = [
51 (
52 'repo:%s' % repo.repo_name,
53 {
54 'child_repos_only': None,
55 'repo_group': None,
56 'repo': repo,
57 },
58 [
59 ({}, False),
60 ({'global': ['hg.admin']}, True),
61 ({'global': []}, False),
62 ({'repositories': {repo.repo_name: 'repository.admin'}}, True),
63 ({'repositories': {repo.repo_name: 'repository.read'}}, False),
64 ({'repositories': {repo.repo_name: 'repository.write'}}, False),
65 ({'repositories': {repo.repo_name: 'repository.none'}}, False),
66 ]
67 ),
68 (
69 'repogroup:%s' % repo_group.group_name,
70 {
71 'repo': None,
72 'repo_group': repo_group,
73 'child_repos_only': True,
74 },
75 [
76 ({}, False),
77 ({'global': ['hg.admin']}, True),
78 ({'global': []}, False),
79 ({'repositories_groups':
80 {repo_group.group_name: 'group.admin'}}, True),
81 ({'repositories_groups':
82 {repo_group.group_name: 'group.read'}}, False),
83 ({'repositories_groups':
84 {repo_group.group_name: 'group.write'}}, False),
85 ({'repositories_groups':
86 {repo_group.group_name: 'group.none'}}, False),
87 ]
88 ),
89 (
90 'repogroup-recursive:%s' % repo_group.group_name,
91 {
92 'repo': None,
93 'repo_group': repo_group,
94 'child_repos_only': False,
95 },
96 [
97 ({}, False),
98 ({'global': ['hg.admin']}, True),
99 ({'global': []}, False),
100 ({'repositories_groups':
101 {repo_group.group_name: 'group.admin'}}, True),
102 ({'repositories_groups':
103 {repo_group.group_name: 'group.read'}}, False),
104 ({'repositories_groups':
105 {repo_group.group_name: 'group.write'}}, False),
106 ({'repositories_groups':
107 {repo_group.group_name: 'group.none'}}, False),
108 ]
109 ),
110 (
111 'global',
112 {
113 'repo': None,
114 'repo_group': None,
115 'child_repos_only': False,
116 }, [
117 ({}, False),
118 ({'global': ['hg.admin']}, True),
119 ({'global': []}, False),
120 ]
121 ),
122 (
123 'root-repos',
124 {
125 'repo': None,
126 'repo_group': None,
127 'child_repos_only': True,
128 }, [
129 ({}, False),
130 ({'global': ['hg.admin']}, True),
131 ({'global': []}, False),
132 ]
133 ),
134 ]
84 135
85 for (scope_input, scope_output), perms_allowed in perms_tests.items():
136 for scope_input, scope_output, perms_allowed in perms_tests:
86 137 for perms_update, allowed in perms_allowed:
87 138 perms = dict(empty_perms_dict, **perms_update)
88 139
89 140 schema = make_integration_schema(
90 141 IntegrationType=StubIntegrationType
91 142 ).bind(permissions=perms)
92 143
93 144 input_data = {
94 145 'options': {
95 146 'enabled': 'true',
96 147 'scope': scope_input,
97 148 'name': 'test integration',
98 149 },
99 150 'settings': {
100 151 'test_string_field': 'stringy',
101 152 'test_int_field': '100',
102 153 }
103 154 }
104 155
105 156 if not allowed:
106 157 with pytest.raises(colander.Invalid):
107 158 schema.deserialize(input_data)
108 159 else:
109 160 assert schema.deserialize(input_data) == {
110 161 'options': {
111 162 'enabled': True,
112 163 'scope': scope_output,
113 164 'name': 'test integration',
114 165 },
115 166 'settings': {
116 167 'test_string_field': 'stringy',
117 168 'test_int_field': 100,
118 169 }
119 170 }
120 171
@@ -1,1760 +1,1779 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess
30 30 import time
31 31 import uuid
32 32
33 33 import mock
34 34 import pyramid.testing
35 35 import pytest
36 36 import colander
37 37 import requests
38 38 from webtest.app import TestApp
39 39
40 40 import rhodecode
41 41 from rhodecode.model.changeset_status import ChangesetStatusModel
42 42 from rhodecode.model.comment import ChangesetCommentsModel
43 43 from rhodecode.model.db import (
44 44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, Integration)
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.pull_request import PullRequestModel
48 48 from rhodecode.model.repo import RepoModel
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50 from rhodecode.model.user import UserModel
51 51 from rhodecode.model.settings import VcsSettingsModel
52 52 from rhodecode.model.user_group import UserGroupModel
53 53 from rhodecode.model.integration import IntegrationModel
54 54 from rhodecode.integrations import integration_type_registry
55 55 from rhodecode.integrations.types.base import IntegrationTypeBase
56 56 from rhodecode.lib.utils import repo2db_mapper
57 57 from rhodecode.lib.vcs import create_vcsserver_proxy
58 58 from rhodecode.lib.vcs.backends import get_backend
59 59 from rhodecode.lib.vcs.nodes import FileNode
60 60 from rhodecode.tests import (
61 61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 63 TEST_USER_REGULAR_PASS)
64 64 from rhodecode.tests.fixture import Fixture
65 65
66 66
67 67 def _split_comma(value):
68 68 return value.split(',')
69 69
70 70
71 71 def pytest_addoption(parser):
72 72 parser.addoption(
73 73 '--keep-tmp-path', action='store_true',
74 74 help="Keep the test temporary directories")
75 75 parser.addoption(
76 76 '--backends', action='store', type=_split_comma,
77 77 default=['git', 'hg', 'svn'],
78 78 help="Select which backends to test for backend specific tests.")
79 79 parser.addoption(
80 80 '--dbs', action='store', type=_split_comma,
81 81 default=['sqlite'],
82 82 help="Select which database to test for database specific tests. "
83 83 "Possible options are sqlite,postgres,mysql")
84 84 parser.addoption(
85 85 '--appenlight', '--ae', action='store_true',
86 86 help="Track statistics in appenlight.")
87 87 parser.addoption(
88 88 '--appenlight-api-key', '--ae-key',
89 89 help="API key for Appenlight.")
90 90 parser.addoption(
91 91 '--appenlight-url', '--ae-url',
92 92 default="https://ae.rhodecode.com",
93 93 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
94 94 parser.addoption(
95 95 '--sqlite-connection-string', action='store',
96 96 default='', help="Connection string for the dbs tests with SQLite")
97 97 parser.addoption(
98 98 '--postgres-connection-string', action='store',
99 99 default='', help="Connection string for the dbs tests with Postgres")
100 100 parser.addoption(
101 101 '--mysql-connection-string', action='store',
102 102 default='', help="Connection string for the dbs tests with MySQL")
103 103 parser.addoption(
104 104 '--repeat', type=int, default=100,
105 105 help="Number of repetitions in performance tests.")
106 106
107 107
108 108 def pytest_configure(config):
109 109 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
110 110 from rhodecode.config import patches
111 111 patches.kombu_1_5_1_python_2_7_11()
112 112
113 113
114 114 def pytest_collection_modifyitems(session, config, items):
115 115 # nottest marked, compare nose, used for transition from nose to pytest
116 116 remaining = [
117 117 i for i in items if getattr(i.obj, '__test__', True)]
118 118 items[:] = remaining
119 119
120 120
121 121 def pytest_generate_tests(metafunc):
122 122 # Support test generation based on --backend parameter
123 123 if 'backend_alias' in metafunc.fixturenames:
124 124 backends = get_backends_from_metafunc(metafunc)
125 125 scope = None
126 126 if not backends:
127 127 pytest.skip("Not enabled for any of selected backends")
128 128 metafunc.parametrize('backend_alias', backends, scope=scope)
129 129 elif hasattr(metafunc.function, 'backends'):
130 130 backends = get_backends_from_metafunc(metafunc)
131 131 if not backends:
132 132 pytest.skip("Not enabled for any of selected backends")
133 133
134 134
135 135 def get_backends_from_metafunc(metafunc):
136 136 requested_backends = set(metafunc.config.getoption('--backends'))
137 137 if hasattr(metafunc.function, 'backends'):
138 138 # Supported backends by this test function, created from
139 139 # pytest.mark.backends
140 140 backends = metafunc.function.backends.args
141 141 elif hasattr(metafunc.cls, 'backend_alias'):
142 142 # Support class attribute "backend_alias", this is mainly
143 143 # for legacy reasons for tests not yet using pytest.mark.backends
144 144 backends = [metafunc.cls.backend_alias]
145 145 else:
146 146 backends = metafunc.config.getoption('--backends')
147 147 return requested_backends.intersection(backends)
148 148
149 149
150 150 @pytest.fixture(scope='session', autouse=True)
151 151 def activate_example_rcextensions(request):
152 152 """
153 153 Patch in an example rcextensions module which verifies passed in kwargs.
154 154 """
155 155 from rhodecode.tests.other import example_rcextensions
156 156
157 157 old_extensions = rhodecode.EXTENSIONS
158 158 rhodecode.EXTENSIONS = example_rcextensions
159 159
160 160 @request.addfinalizer
161 161 def cleanup():
162 162 rhodecode.EXTENSIONS = old_extensions
163 163
164 164
165 165 @pytest.fixture
166 166 def capture_rcextensions():
167 167 """
168 168 Returns the recorded calls to entry points in rcextensions.
169 169 """
170 170 calls = rhodecode.EXTENSIONS.calls
171 171 calls.clear()
172 172 # Note: At this moment, it is still the empty dict, but that will
173 173 # be filled during the test run and since it is a reference this
174 174 # is enough to make it work.
175 175 return calls
176 176
177 177
178 178 @pytest.fixture(scope='session')
179 179 def http_environ_session():
180 180 """
181 181 Allow to use "http_environ" in session scope.
182 182 """
183 183 return http_environ(
184 184 http_host_stub=http_host_stub())
185 185
186 186
187 187 @pytest.fixture
188 188 def http_host_stub():
189 189 """
190 190 Value of HTTP_HOST in the test run.
191 191 """
192 192 return 'test.example.com:80'
193 193
194 194
195 195 @pytest.fixture
196 196 def http_environ(http_host_stub):
197 197 """
198 198 HTTP extra environ keys.
199 199
200 200 User by the test application and as well for setting up the pylons
201 201 environment. In the case of the fixture "app" it should be possible
202 202 to override this for a specific test case.
203 203 """
204 204 return {
205 205 'SERVER_NAME': http_host_stub.split(':')[0],
206 206 'SERVER_PORT': http_host_stub.split(':')[1],
207 207 'HTTP_HOST': http_host_stub,
208 208 }
209 209
210 210
211 211 @pytest.fixture(scope='function')
212 212 def app(request, pylonsapp, http_environ):
213 213 app = TestApp(
214 214 pylonsapp,
215 215 extra_environ=http_environ)
216 216 if request.cls:
217 217 request.cls.app = app
218 218 return app
219 219
220 220
221 221 @pytest.fixture()
222 222 def app_settings(pylonsapp, pylons_config):
223 223 """
224 224 Settings dictionary used to create the app.
225 225
226 226 Parses the ini file and passes the result through the sanitize and apply
227 227 defaults mechanism in `rhodecode.config.middleware`.
228 228 """
229 229 from paste.deploy.loadwsgi import loadcontext, APP
230 230 from rhodecode.config.middleware import (
231 231 sanitize_settings_and_apply_defaults)
232 232 context = loadcontext(APP, 'config:' + pylons_config)
233 233 settings = sanitize_settings_and_apply_defaults(context.config())
234 234 return settings
235 235
236 236
237 237 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
238 238
239 239
240 240 def _autologin_user(app, *args):
241 241 session = login_user_session(app, *args)
242 242 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
243 243 return LoginData(csrf_token, session['rhodecode_user'])
244 244
245 245
246 246 @pytest.fixture
247 247 def autologin_user(app):
248 248 """
249 249 Utility fixture which makes sure that the admin user is logged in
250 250 """
251 251 return _autologin_user(app)
252 252
253 253
254 254 @pytest.fixture
255 255 def autologin_regular_user(app):
256 256 """
257 257 Utility fixture which makes sure that the regular user is logged in
258 258 """
259 259 return _autologin_user(
260 260 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
261 261
262 262
263 263 @pytest.fixture(scope='function')
264 264 def csrf_token(request, autologin_user):
265 265 return autologin_user.csrf_token
266 266
267 267
268 268 @pytest.fixture(scope='function')
269 269 def xhr_header(request):
270 270 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
271 271
272 272
273 273 @pytest.fixture
274 274 def real_crypto_backend(monkeypatch):
275 275 """
276 276 Switch the production crypto backend on for this test.
277 277
278 278 During the test run the crypto backend is replaced with a faster
279 279 implementation based on the MD5 algorithm.
280 280 """
281 281 monkeypatch.setattr(rhodecode, 'is_test', False)
282 282
283 283
284 284 @pytest.fixture(scope='class')
285 285 def index_location(request, pylonsapp):
286 286 index_location = pylonsapp.config['app_conf']['search.location']
287 287 if request.cls:
288 288 request.cls.index_location = index_location
289 289 return index_location
290 290
291 291
292 292 @pytest.fixture(scope='session', autouse=True)
293 293 def tests_tmp_path(request):
294 294 """
295 295 Create temporary directory to be used during the test session.
296 296 """
297 297 if not os.path.exists(TESTS_TMP_PATH):
298 298 os.makedirs(TESTS_TMP_PATH)
299 299
300 300 if not request.config.getoption('--keep-tmp-path'):
301 301 @request.addfinalizer
302 302 def remove_tmp_path():
303 303 shutil.rmtree(TESTS_TMP_PATH)
304 304
305 305 return TESTS_TMP_PATH
306 306
307 307
308 308 @pytest.fixture(scope='session', autouse=True)
309 309 def patch_pyro_request_scope_proxy_factory(request):
310 310 """
311 311 Patch the pyro proxy factory to always use the same dummy request object
312 312 when under test. This will return the same pyro proxy on every call.
313 313 """
314 314 dummy_request = pyramid.testing.DummyRequest()
315 315
316 316 def mocked_call(self, request=None):
317 317 return self.getProxy(request=dummy_request)
318 318
319 319 patcher = mock.patch(
320 320 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
321 321 new=mocked_call)
322 322 patcher.start()
323 323
324 324 @request.addfinalizer
325 325 def undo_patching():
326 326 patcher.stop()
327 327
328 328
329 329 @pytest.fixture
330 330 def test_repo_group(request):
331 331 """
332 332 Create a temporary repository group, and destroy it after
333 333 usage automatically
334 334 """
335 335 fixture = Fixture()
336 336 repogroupid = 'test_repo_group_%s' % int(time.time())
337 337 repo_group = fixture.create_repo_group(repogroupid)
338 338
339 339 def _cleanup():
340 340 fixture.destroy_repo_group(repogroupid)
341 341
342 342 request.addfinalizer(_cleanup)
343 343 return repo_group
344 344
345 345
346 346 @pytest.fixture
347 347 def test_user_group(request):
348 348 """
349 349 Create a temporary user group, and destroy it after
350 350 usage automatically
351 351 """
352 352 fixture = Fixture()
353 353 usergroupid = 'test_user_group_%s' % int(time.time())
354 354 user_group = fixture.create_user_group(usergroupid)
355 355
356 356 def _cleanup():
357 357 fixture.destroy_user_group(user_group)
358 358
359 359 request.addfinalizer(_cleanup)
360 360 return user_group
361 361
362 362
363 363 @pytest.fixture(scope='session')
364 364 def test_repo(request):
365 365 container = TestRepoContainer()
366 366 request.addfinalizer(container._cleanup)
367 367 return container
368 368
369 369
370 370 class TestRepoContainer(object):
371 371 """
372 372 Container for test repositories which are used read only.
373 373
374 374 Repositories will be created on demand and re-used during the lifetime
375 375 of this object.
376 376
377 377 Usage to get the svn test repository "minimal"::
378 378
379 379 test_repo = TestContainer()
380 380 repo = test_repo('minimal', 'svn')
381 381
382 382 """
383 383
384 384 dump_extractors = {
385 385 'git': utils.extract_git_repo_from_dump,
386 386 'hg': utils.extract_hg_repo_from_dump,
387 387 'svn': utils.extract_svn_repo_from_dump,
388 388 }
389 389
390 390 def __init__(self):
391 391 self._cleanup_repos = []
392 392 self._fixture = Fixture()
393 393 self._repos = {}
394 394
395 395 def __call__(self, dump_name, backend_alias):
396 396 key = (dump_name, backend_alias)
397 397 if key not in self._repos:
398 398 repo = self._create_repo(dump_name, backend_alias)
399 399 self._repos[key] = repo.repo_id
400 400 return Repository.get(self._repos[key])
401 401
402 402 def _create_repo(self, dump_name, backend_alias):
403 403 repo_name = '%s-%s' % (backend_alias, dump_name)
404 404 backend_class = get_backend(backend_alias)
405 405 dump_extractor = self.dump_extractors[backend_alias]
406 406 repo_path = dump_extractor(dump_name, repo_name)
407 407 vcs_repo = backend_class(repo_path)
408 408 repo2db_mapper({repo_name: vcs_repo})
409 409 repo = RepoModel().get_by_repo_name(repo_name)
410 410 self._cleanup_repos.append(repo_name)
411 411 return repo
412 412
413 413 def _cleanup(self):
414 414 for repo_name in reversed(self._cleanup_repos):
415 415 self._fixture.destroy_repo(repo_name)
416 416
417 417
418 418 @pytest.fixture
419 419 def backend(request, backend_alias, pylonsapp, test_repo):
420 420 """
421 421 Parametrized fixture which represents a single backend implementation.
422 422
423 423 It respects the option `--backends` to focus the test run on specific
424 424 backend implementations.
425 425
426 426 It also supports `pytest.mark.xfail_backends` to mark tests as failing
427 427 for specific backends. This is intended as a utility for incremental
428 428 development of a new backend implementation.
429 429 """
430 430 if backend_alias not in request.config.getoption('--backends'):
431 431 pytest.skip("Backend %s not selected." % (backend_alias, ))
432 432
433 433 utils.check_xfail_backends(request.node, backend_alias)
434 434 utils.check_skip_backends(request.node, backend_alias)
435 435
436 436 repo_name = 'vcs_test_%s' % (backend_alias, )
437 437 backend = Backend(
438 438 alias=backend_alias,
439 439 repo_name=repo_name,
440 440 test_name=request.node.name,
441 441 test_repo_container=test_repo)
442 442 request.addfinalizer(backend.cleanup)
443 443 return backend
444 444
445 445
446 446 @pytest.fixture
447 447 def backend_git(request, pylonsapp, test_repo):
448 448 return backend(request, 'git', pylonsapp, test_repo)
449 449
450 450
451 451 @pytest.fixture
452 452 def backend_hg(request, pylonsapp, test_repo):
453 453 return backend(request, 'hg', pylonsapp, test_repo)
454 454
455 455
456 456 @pytest.fixture
457 457 def backend_svn(request, pylonsapp, test_repo):
458 458 return backend(request, 'svn', pylonsapp, test_repo)
459 459
460 460
461 461 @pytest.fixture
462 462 def backend_random(backend_git):
463 463 """
464 464 Use this to express that your tests need "a backend.
465 465
466 466 A few of our tests need a backend, so that we can run the code. This
467 467 fixture is intended to be used for such cases. It will pick one of the
468 468 backends and run the tests.
469 469
470 470 The fixture `backend` would run the test multiple times for each
471 471 available backend which is a pure waste of time if the test is
472 472 independent of the backend type.
473 473 """
474 474 # TODO: johbo: Change this to pick a random backend
475 475 return backend_git
476 476
477 477
478 478 @pytest.fixture
479 479 def backend_stub(backend_git):
480 480 """
481 481 Use this to express that your tests need a backend stub
482 482
483 483 TODO: mikhail: Implement a real stub logic instead of returning
484 484 a git backend
485 485 """
486 486 return backend_git
487 487
488 488
489 489 @pytest.fixture
490 490 def repo_stub(backend_stub):
491 491 """
492 492 Use this to express that your tests need a repository stub
493 493 """
494 494 return backend_stub.create_repo()
495 495
496 496
497 497 class Backend(object):
498 498 """
499 499 Represents the test configuration for one supported backend
500 500
501 501 Provides easy access to different test repositories based on
502 502 `__getitem__`. Such repositories will only be created once per test
503 503 session.
504 504 """
505 505
506 506 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
507 507 _master_repo = None
508 508 _commit_ids = {}
509 509
510 510 def __init__(self, alias, repo_name, test_name, test_repo_container):
511 511 self.alias = alias
512 512 self.repo_name = repo_name
513 513 self._cleanup_repos = []
514 514 self._test_name = test_name
515 515 self._test_repo_container = test_repo_container
516 516 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
517 517 # Fixture will survive in the end.
518 518 self._fixture = Fixture()
519 519
520 520 def __getitem__(self, key):
521 521 return self._test_repo_container(key, self.alias)
522 522
523 523 @property
524 524 def repo(self):
525 525 """
526 526 Returns the "current" repository. This is the vcs_test repo or the
527 527 last repo which has been created with `create_repo`.
528 528 """
529 529 from rhodecode.model.db import Repository
530 530 return Repository.get_by_repo_name(self.repo_name)
531 531
532 532 @property
533 533 def default_branch_name(self):
534 534 VcsRepository = get_backend(self.alias)
535 535 return VcsRepository.DEFAULT_BRANCH_NAME
536 536
537 537 @property
538 538 def default_head_id(self):
539 539 """
540 540 Returns the default head id of the underlying backend.
541 541
542 542 This will be the default branch name in case the backend does have a
543 543 default branch. In the other cases it will point to a valid head
544 544 which can serve as the base to create a new commit on top of it.
545 545 """
546 546 vcsrepo = self.repo.scm_instance()
547 547 head_id = (
548 548 vcsrepo.DEFAULT_BRANCH_NAME or
549 549 vcsrepo.commit_ids[-1])
550 550 return head_id
551 551
552 552 @property
553 553 def commit_ids(self):
554 554 """
555 555 Returns the list of commits for the last created repository
556 556 """
557 557 return self._commit_ids
558 558
559 559 def create_master_repo(self, commits):
560 560 """
561 561 Create a repository and remember it as a template.
562 562
563 563 This allows to easily create derived repositories to construct
564 564 more complex scenarios for diff, compare and pull requests.
565 565
566 566 Returns a commit map which maps from commit message to raw_id.
567 567 """
568 568 self._master_repo = self.create_repo(commits=commits)
569 569 return self._commit_ids
570 570
571 571 def create_repo(
572 572 self, commits=None, number_of_commits=0, heads=None,
573 573 name_suffix=u'', **kwargs):
574 574 """
575 575 Create a repository and record it for later cleanup.
576 576
577 577 :param commits: Optional. A sequence of dict instances.
578 578 Will add a commit per entry to the new repository.
579 579 :param number_of_commits: Optional. If set to a number, this number of
580 580 commits will be added to the new repository.
581 581 :param heads: Optional. Can be set to a sequence of of commit
582 582 names which shall be pulled in from the master repository.
583 583
584 584 """
585 585 self.repo_name = self._next_repo_name() + name_suffix
586 586 repo = self._fixture.create_repo(
587 587 self.repo_name, repo_type=self.alias, **kwargs)
588 588 self._cleanup_repos.append(repo.repo_name)
589 589
590 590 commits = commits or [
591 591 {'message': 'Commit %s of %s' % (x, self.repo_name)}
592 592 for x in xrange(number_of_commits)]
593 593 self._add_commits_to_repo(repo.scm_instance(), commits)
594 594 if heads:
595 595 self.pull_heads(repo, heads)
596 596
597 597 return repo
598 598
599 599 def pull_heads(self, repo, heads):
600 600 """
601 601 Make sure that repo contains all commits mentioned in `heads`
602 602 """
603 603 vcsmaster = self._master_repo.scm_instance()
604 604 vcsrepo = repo.scm_instance()
605 605 vcsrepo.config.clear_section('hooks')
606 606 commit_ids = [self._commit_ids[h] for h in heads]
607 607 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
608 608
609 609 def create_fork(self):
610 610 repo_to_fork = self.repo_name
611 611 self.repo_name = self._next_repo_name()
612 612 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
613 613 self._cleanup_repos.append(self.repo_name)
614 614 return repo
615 615
616 616 def new_repo_name(self, suffix=u''):
617 617 self.repo_name = self._next_repo_name() + suffix
618 618 self._cleanup_repos.append(self.repo_name)
619 619 return self.repo_name
620 620
621 621 def _next_repo_name(self):
622 622 return u"%s_%s" % (
623 623 self.invalid_repo_name.sub(u'_', self._test_name),
624 624 len(self._cleanup_repos))
625 625
626 626 def ensure_file(self, filename, content='Test content\n'):
627 627 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
628 628 commits = [
629 629 {'added': [
630 630 FileNode(filename, content=content),
631 631 ]},
632 632 ]
633 633 self._add_commits_to_repo(self.repo.scm_instance(), commits)
634 634
635 635 def enable_downloads(self):
636 636 repo = self.repo
637 637 repo.enable_downloads = True
638 638 Session().add(repo)
639 639 Session().commit()
640 640
641 641 def cleanup(self):
642 642 for repo_name in reversed(self._cleanup_repos):
643 643 self._fixture.destroy_repo(repo_name)
644 644
645 645 def _add_commits_to_repo(self, repo, commits):
646 646 commit_ids = _add_commits_to_repo(repo, commits)
647 647 if not commit_ids:
648 648 return
649 649 self._commit_ids = commit_ids
650 650
651 651 # Creating refs for Git to allow fetching them from remote repository
652 652 if self.alias == 'git':
653 653 refs = {}
654 654 for message in self._commit_ids:
655 655 # TODO: mikhail: do more special chars replacements
656 656 ref_name = 'refs/test-refs/{}'.format(
657 657 message.replace(' ', ''))
658 658 refs[ref_name] = self._commit_ids[message]
659 659 self._create_refs(repo, refs)
660 660
661 661 def _create_refs(self, repo, refs):
662 662 for ref_name in refs:
663 663 repo.set_refs(ref_name, refs[ref_name])
664 664
665 665
666 666 @pytest.fixture
667 667 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
668 668 """
669 669 Parametrized fixture which represents a single vcs backend implementation.
670 670
671 671 See the fixture `backend` for more details. This one implements the same
672 672 concept, but on vcs level. So it does not provide model instances etc.
673 673
674 674 Parameters are generated dynamically, see :func:`pytest_generate_tests`
675 675 for how this works.
676 676 """
677 677 if backend_alias not in request.config.getoption('--backends'):
678 678 pytest.skip("Backend %s not selected." % (backend_alias, ))
679 679
680 680 utils.check_xfail_backends(request.node, backend_alias)
681 681 utils.check_skip_backends(request.node, backend_alias)
682 682
683 683 repo_name = 'vcs_test_%s' % (backend_alias, )
684 684 repo_path = os.path.join(tests_tmp_path, repo_name)
685 685 backend = VcsBackend(
686 686 alias=backend_alias,
687 687 repo_path=repo_path,
688 688 test_name=request.node.name,
689 689 test_repo_container=test_repo)
690 690 request.addfinalizer(backend.cleanup)
691 691 return backend
692 692
693 693
694 694 @pytest.fixture
695 695 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
696 696 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
697 697
698 698
699 699 @pytest.fixture
700 700 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
701 701 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
702 702
703 703
704 704 @pytest.fixture
705 705 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
706 706 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
707 707
708 708
709 709 @pytest.fixture
710 710 def vcsbackend_random(vcsbackend_git):
711 711 """
712 712 Use this to express that your tests need "a vcsbackend".
713 713
714 714 The fixture `vcsbackend` would run the test multiple times for each
715 715 available vcs backend which is a pure waste of time if the test is
716 716 independent of the vcs backend type.
717 717 """
718 718 # TODO: johbo: Change this to pick a random backend
719 719 return vcsbackend_git
720 720
721 721
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    stub_backend = vcsbackend_git
    return stub_backend
730 730
731 731
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.

    Wraps the pre-created ``vcs_test_<alias>`` repository and can create
    additional throw-away repositories, which are removed again in
    :meth:`cleanup`.
    """

    # Anything which is not a plain ASCII letter or digit gets replaced
    # when deriving repository names from the test name.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        # alias: backend identifier, e.g. 'git', 'hg' or 'svn'
        self.alias = alias
        # Path of the "current" repository; updated by create_repo() and
        # new_repo_path().
        self._repo_path = repo_path
        # Repositories created during the test; removed in cleanup().
        self._cleanup_repos = []
        # Used as prefix when generating repository names.
        self._test_name = test_name
        # Callable handing out prepared test repositories by name, see the
        # `test_repo` fixture.
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Return the vcs instance of the prepared test repository `key`."""
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        """
        Create a fresh repository and register it for cleanup.

        :param commits: list of commit dicts as consumed by
            `_add_commits_to_repo`; takes precedence over
            `number_of_commits`.
        :param number_of_commits: number of auto-generated commits when
            `commits` is not given.
        :param _clone_repo: when given, the new repository is created as a
            clone of this repository.
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a registered clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        """Remove all repositories created through this instance."""
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Generate a fresh repository path and make it the current one."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derive a unique, filesystem-safe name from the test name and the
        # number of repositories created so far.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a new file to `repo` via an in-memory commit."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        """Add a file to the current repository (must be a created one)."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
808 808
809 809
810 810 def _add_commits_to_repo(vcs_repo, commits):
811 811 commit_ids = {}
812 812 if not commits:
813 813 return commit_ids
814 814
815 815 imc = vcs_repo.in_memory_commit
816 816 commit = None
817 817
818 818 for idx, commit in enumerate(commits):
819 819 message = unicode(commit.get('message', 'Commit %s' % idx))
820 820
821 821 for node in commit.get('added', []):
822 822 imc.add(FileNode(node.path, content=node.content))
823 823 for node in commit.get('changed', []):
824 824 imc.change(FileNode(node.path, content=node.content))
825 825 for node in commit.get('removed', []):
826 826 imc.remove(FileNode(node.path))
827 827
828 828 parents = [
829 829 vcs_repo.get_commit(commit_id=commit_ids[p])
830 830 for p in commit.get('parents', [])]
831 831
832 832 operations = ('added', 'changed', 'removed')
833 833 if not any((commit.get(o) for o in operations)):
834 834 imc.add(FileNode('file_%s' % idx, content=message))
835 835
836 836 commit = imc.commit(
837 837 message=message,
838 838 author=unicode(commit.get('author', 'Automatic')),
839 839 date=commit.get('date'),
840 840 branch=commit.get('branch'),
841 841 parents=parents)
842 842
843 843 commit_ids[commit.message] = commit.raw_id
844 844
845 845 return commit_ids
846 846
847 847
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
857 857
858 858
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # Public URL under which the last served repository is reachable.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Spawn a daemon serving `vcsrepo` and remember it for cleanup."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        command = [
            'svnserve', '-d', '--foreground', '--listen-host', 'localhost',
            '--root', vcsrepo.path]
        self._cleanup_servers.append(subprocess.Popen(command))
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started by this instance."""
        for server_process in self._cleanup_servers:
            server_process.terminate()
884 884
885 885
@pytest.fixture
def pr_util(backend, request):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
904 904
905 905
class PRTestUtility(object):
    """
    Helper around a single pull request, handed out by the `pr_util` fixture.

    Creates source/target repositories via the `backend` fixture, opens one
    pull request between them and offers helpers to mutate, comment on,
    approve and version that pull request. :meth:`cleanup` removes the pull
    request again and stops any patchers which were started.
    """

    # Populated lazily by create_pull_request(); the class-level defaults
    # keep cleanup() safe even if nothing was created.
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request of this utility.

        Subsequent calls return the already created pull request; the
        creation related parameters are then ignored.

        :param commits: commit dicts for the master repository;
            `target_head`, `source_head` and `revisions` reference commits
            by their messages.
        :param approved: cast approving votes from all reviewers.
        :param mergeable: patch the vcs settings so merging is enabled.
        :param enable_notifications: when False, notification creation is
            mocked out until the patcher is stopped in cleanup().
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an approving review status from every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the pull request on behalf of its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a "branch:<name>:<commit_id>" reference for the commit
        # which was created from the given message.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Default reviewers are the two regular test users.
        model = UserModel()
        return [
            model.get_by_username(TEST_USER_REGULAR_LOGIN),
            model.get_by_username(TEST_USER_REGULAR2_LOGIN),
        ]

    def update_source_repository(self, head=None):
        """Pull the given head (default 'c3') into the source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add one commit to the source repo and return its commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit off the source repo and return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general comment, optionally linked to a PR version."""
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment, optionally linked to a PR version."""
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Snapshot the pull request into a new version and return it."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set the given review status on behalf of each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch the vcs settings so that PR merging is (dis)allowed."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request and stop all started patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1089 1089
1090 1090
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1098 1098
1099 1099
@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1107 1107
1108 1108
@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1117 1117
1118 1118
# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Creates users, user groups, repo groups and permission grants for tests
    and tracks them, so that :meth:`cleanup` can remove everything again in
    a dependency-safe order.
    """

    def __init__(self, test_name="test"):
        # Used as name prefix for every entity created through this utility.
        self._test_name = test_name
        self.fixture = Fixture()
        # Ids of created entities; drive the _cleanup_* methods.
        self.repo_group_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # Tuples of (target_id, subject_id) for granted permissions.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        # Tuples of (user_name, permission_name) for global grants.
        self.user_permissions = []

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a uniquely named repo group owned by `owner`."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a uniquely named user."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        """Create a user and a user group which contains that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
        """Create a uniquely named user group, optionally with members."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, **kwargs)
        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """Record a global permission grant, disabling inherited defaults."""
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group` and track the grant."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group`."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant `user` a permission on `repo` and track the grant."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo`."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group`."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group`."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission, re-enabling inherited defaults."""
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        # Toggle whether the user inherits the default permissions.
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Remove everything created; permissions first, users last."""
        self._cleanup_permissions()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        # Revoke each tracked permission through the matching model.
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        # Destroy nested groups before their parents (deepest path first).
        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        # Destroy nested groups before their parents (deepest path first).
        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1305 1305
1306 1306
# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    Pyro4 attaches this information as the attribute `_pyroTraceback`
    to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    excinfo = call.excinfo
    if excinfo:
        _add_pyro_remote_traceback(report, excinfo.value)
1321 1321
1322 1322
1323 1323 def _add_pyro_remote_traceback(report, exc):
1324 1324 pyro_traceback = getattr(exc, '_pyroTraceback', None)
1325 1325
1326 1326 if pyro_traceback:
1327 1327 traceback = ''.join(pyro_traceback)
1328 1328 section = 'Pyro4 remote traceback ' + report.when
1329 1329 report.sections.append((section, traceback))
1330 1330
1331 1331
@pytest.fixture(scope='session')
def testrun():
    """Identifying metadata of one test run, shared session-wide."""
    identity = {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
    return identity
1339 1339
1340 1340
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Snapshot the vcsserver memory usage before the test runs; a garbage
    # collection run keeps the numbers comparable.
    server_and_port = pylonsapp.config['vcs.server']
    server = create_vcsserver_proxy(server_and_port)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    # Snapshot the memory usage of the test process itself.
    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Collect the "after" values once the test has finished and ship
        # everything to the appenlight server in one request.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1413 1413
1414 1414
class AppenlightClient(object):
    """
    Minimal client which pushes collected test statistics to Appenlight.

    Tag values can be recorded before and after the measured work via
    :meth:`tag_before` / :meth:`tag_after`; :meth:`send_stats` computes
    deltas and posts all queued entries in one request.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        # :param url: base URL of the appenlight logs endpoint
        # :param api_key: sent as X-appenlight-api-key header
        # :param add_server/add_timestamp: auto-fill these attributes on
        #     every collected entry
        # :param namespace/request: identify the test and test run
        # :param testrun: metadata dict, see the `testrun` fixture
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a tag value taken before the measured work."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a tag value taken after the measured work."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue a log entry, filling in common attributes if configured."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """
        Post all queued entries to the appenlight server.

        :raises Exception: when the server does not answer with status 200.
        """
        tags = [
            ('testrun', self.request),
            # .get() keeps this working when no testrun dict was provided
            # (the constructor defaults to an empty dict).
            ('testrun.start', self.testrun.get('start')),
            ('testrun.timestamp', self.testrun.get('timestamp')),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                # Tag missing after the run or not numeric; skip the delta.
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1484 1484
1485 1485
@pytest.fixture
def gist_util(request, pylonsapp):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1494 1494
1495 1495
class GistUtility(object):
    """Creates gists via the test fixture and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and register it for automatic destruction."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist which was created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1509 1509
1510 1510
@pytest.fixture
def enabled_backends(request):
    """Copy of the backend aliases enabled via the ``--backends`` option."""
    return list(request.config.option.backends)
1515 1515
1516 1516
@pytest.fixture
def settings_util(request):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1525 1525
1526 1526
class SettingsUtility(object):
    """
    Creates RhodeCode ui and settings rows (global and per repository) and
    deletes them again in :meth:`cleanup`.
    """

    def __init__(self):
        # Ids of created rows; consumed by cleanup().
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui row and return the new setting."""
        # Derive a deterministic unique key when none was given.
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui row and return the new setting."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete all recorded rows, committing once at the end."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1607 1607
1608 1608
@pytest.fixture
def no_notifications(request):
    """Suppress notification creation for the duration of the test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1615 1615
1616 1616
@pytest.fixture
def silence_action_logger(request):
    """Silence ``action_logger`` so tests do not write audit entries."""
    # NOTE: the original local was (misleadingly) called
    # notification_patcher; what is patched here is the action logger.
    patcher = mock.patch(
        'rhodecode.lib.utils.action_logger')
    patcher.start()
    request.addfinalizer(patcher.stop)
1623 1623
1624 1624
@pytest.fixture(scope='session')
def repeat(request):
    """Session-wide repetition count, read from the ``--repeat`` option.

    Slower tests may divide this value by 10 or 100; it is chosen so
    that the default test suite does not become too slow.
    """
    return request.config.getoption('--repeat')
1634 1634
1635 1635
@pytest.fixture
def rhodecode_fixtures():
    """Provide a fresh ``Fixture`` helper instance."""
    return Fixture()
1639 1639
1640 1640
@pytest.fixture
def request_stub():
    """Dummy pyramid request, pre-configured for the https scheme."""
    stub = pyramid.testing.DummyRequest()
    stub.scheme = 'https'
    return stub
1649 1649
1650 1650
@pytest.fixture
def config_stub(request, request_stub):
    """Set up ``pyramid.testing`` and return the Configurator.

    pyramid's testing environment is torn down again when the test
    using this fixture finishes.
    """
    configurator = pyramid.testing.setUp(request=request_stub)

    @request.addfinalizer
    def teardown():
        pyramid.testing.tearDown()

    return configurator
1663 1663
1664 1664
@pytest.fixture
def StubIntegrationType():
    """Register and return a minimal integration type for testing.

    Instances collect every event passed to :meth:`send_event` in
    ``sent_events`` instead of delivering it, so tests can inspect
    what would have been sent.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """Test integration type class"""

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'
        icon = 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            # events are recorded rather than delivered, for testing
            self.sent_events = []

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            # schema with one required string and one integer field,
            # matching the stub_integration_settings fixture payload
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )

            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1698 1698
@pytest.fixture
def stub_integration_settings():
    """Valid settings payload for ``StubIntegrationType``'s schema."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1705 1705
1706 1706
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Integration scoped to a single repository.

    The integration is deleted again when the requesting test ends.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1719 1720
1720 1721
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Integration scoped to a repo group's direct child repos only.

    The integration is deleted again when the requesting test ends.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1735
1736
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
    StubIntegrationType, stub_integration_settings):
    """Integration scoped to a repo group and all repos below it
    (``child_repos_only=False`` makes the scope recursive).

    The integration is deleted again when the requesting test ends.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1733 1750
1734 1751
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Integration with global scope (all repos, all groups).

    The integration is deleted again when the requesting test ends.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1747 1765
1748 1766
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Integration scoped to top-level (root) repositories only.

    The integration is deleted again when the requesting test ends.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        # FIX: was 'test global integration' (copy-paste from
        # global_integration_stub), which made the two fixtures'
        # integrations indistinguishable by name.
        name='test root repos integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
General Comments 0
You need to be logged in to leave comments. Login now