events: re-organize events handling...
marcink
r1789:13d7e2ce default
@@ -1,76 +1,76 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.lib import helpers as h
22 22 from rhodecode.lib.utils2 import safe_int
23 23
24 24
25 def reviewer_as_json(user, reasons, mandatory):
25 def reviewer_as_json(user, reasons=None, mandatory=False):
26 26 """
27 27 Returns json struct of a reviewer for frontend
28 28
29 29 :param user: the reviewer
30 30 :param reasons: list of strings of why they are reviewers
31 31 :param mandatory: bool, to set user as mandatory
32 32 """
33 33
34 34 return {
35 35 'user_id': user.user_id,
36 'reasons': reasons,
36 'reasons': reasons or [],
37 37 'mandatory': mandatory,
38 38 'username': user.username,
39 39 'firstname': user.firstname,
40 40 'lastname': user.lastname,
41 41 'gravatar_link': h.gravatar_url(user.email, 14),
42 42 }
43 43
44 44
45 45 def get_default_reviewers_data(
46 46 current_user, source_repo, source_commit, target_repo, target_commit):
47 47
48 48 """ Return json for default reviewers of a repository """
49 49
50 50 reasons = ['Default reviewer', 'Repository owner']
51 51 default = reviewer_as_json(
52 52 user=current_user, reasons=reasons, mandatory=False)
53 53
54 54 return {
55 55 'api_ver': 'v1', # define version for later possible schema upgrade
56 56 'reviewers': [default],
57 57 'rules': {},
58 58 'rules_data': {},
59 59 }
60 60
61 61
62 62 def validate_default_reviewers(review_members, reviewer_rules):
63 63 """
64 64 Function to validate submitted reviewers against the saved rules
65 65
66 66 """
67 67 reviewers = []
68 68 reviewer_by_id = {}
69 69 for r in review_members:
70 70 reviewer_user_id = safe_int(r['user_id'])
71 71 entry = (reviewer_user_id, r['reasons'], r['mandatory'])
72 72
73 73 reviewer_by_id[reviewer_user_id] = entry
74 74 reviewers.append(entry)
75 75
76 76 return reviewers
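
The change above gives reviewer_as_json keyword defaults and normalises reasons with "reasons or []". A minimal standalone sketch (not RhodeCode code; FakeUser and the trimmed field set are illustrative) of why that matters for the frontend payload:

    from collections import namedtuple

    FakeUser = namedtuple('FakeUser', 'user_id username firstname lastname email')

    def reviewer_as_json(user, reasons=None, mandatory=False):
        # trimmed version of the helper above; gravatar handling omitted
        return {
            'user_id': user.user_id,
            'reasons': reasons or [],   # the frontend always gets a list, never None
            'mandatory': mandatory,
            'username': user.username,
        }

    user = FakeUser(1, 'admin', 'RhodeCode', 'Admin', 'admin@example.com')
    assert reviewer_as_json(user)['reasons'] == []
    assert reviewer_as_json(user, ['Repository owner'])['reasons'] == ['Repository owner']
    assert reviewer_as_json(user, mandatory=True)['mandatory'] is True
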
@@ -1,521 +1,527 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25 from collections import OrderedDict
26 26
27 27 from paste.registry import RegistryManager
28 28 from paste.gzipper import make_gzip_middleware
29 29 from pylons.wsgiapp import PylonsApp
30 30 from pyramid.authorization import ACLAuthorizationPolicy
31 31 from pyramid.config import Configurator
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.httpexceptions import (
35 35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
36 36 from pyramid.events import ApplicationCreated
37 37 from pyramid.renderers import render_to_response
38 38 from routes.middleware import RoutesMiddleware
39 39 import routes.util
40 40
41 41 import rhodecode
42 42
43 43 from rhodecode.model import meta
44 44 from rhodecode.config import patches
45 45 from rhodecode.config.routing import STATIC_FILE_PREFIX
46 46 from rhodecode.config.environment import (
47 47 load_environment, load_pyramid_environment)
48 48
49 49 from rhodecode.lib.vcs import VCSCommunicationError
50 50 from rhodecode.lib.exceptions import VCSServerUnavailable
51 51 from rhodecode.lib.middleware import csrf
52 52 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
53 53 from rhodecode.lib.middleware.error_handling import (
54 54 PylonsErrorHandlingMiddleware)
55 55 from rhodecode.lib.middleware.https_fixup import HttpsFixup
56 56 from rhodecode.lib.middleware.vcs import VCSMiddleware
57 57 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
58 58 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
59 59 from rhodecode.subscribers import (
60 60 scan_repositories_if_enabled, write_js_routes_if_enabled,
61 61 write_metadata_if_needed)
62 62
63 63
64 64 log = logging.getLogger(__name__)
65 65
66 66
67 67 # this is used to avoid the route lookup overhead in routesmiddleware
68 68 # for certain routes which won't go to pylons - eg. static files, debugger
69 69 # it is only needed for the pylons migration and can be removed once complete
70 70 class SkippableRoutesMiddleware(RoutesMiddleware):
71 71 """ Routes middleware that allows you to skip prefixes """
72 72
73 73 def __init__(self, *args, **kw):
74 74 self.skip_prefixes = kw.pop('skip_prefixes', [])
75 75 super(SkippableRoutesMiddleware, self).__init__(*args, **kw)
76 76
77 77 def __call__(self, environ, start_response):
78 78 for prefix in self.skip_prefixes:
79 79 if environ['PATH_INFO'].startswith(prefix):
80 80 # added to avoid the case when a missing /_static route falls
81 81 # through to pylons and causes an exception as pylons is
82 82 # expecting wsgiorg.routing_args to be set in the environ
83 83 # by RoutesMiddleware.
84 84 if 'wsgiorg.routing_args' not in environ:
85 85 environ['wsgiorg.routing_args'] = (None, {})
86 86 return self.app(environ, start_response)
87 87
88 88 return super(SkippableRoutesMiddleware, self).__call__(
89 89 environ, start_response)
90 90
91 91
92 92 def make_app(global_conf, static_files=True, **app_conf):
93 93 """Create a Pylons WSGI application and return it
94 94
95 95 ``global_conf``
96 96 The inherited configuration for this application. Normally from
97 97 the [DEFAULT] section of the Paste ini file.
98 98
99 99 ``app_conf``
100 100 The application's local configuration. Normally specified in
101 101 the [app:<name>] section of the Paste ini file (where <name>
102 102 defaults to main).
103 103
104 104 """
105 105 # Apply compatibility patches
106 106 patches.kombu_1_5_1_python_2_7_11()
107 107 patches.inspect_getargspec()
108 108
109 109 # Configure the Pylons environment
110 110 config = load_environment(global_conf, app_conf)
111 111
112 112 # The Pylons WSGI app
113 113 app = PylonsApp(config=config)
114 114 if rhodecode.is_test:
115 115 app = csrf.CSRFDetector(app)
116 116
117 117 expected_origin = config.get('expected_origin')
118 118 if expected_origin:
119 119 # The API can be accessed from other Origins.
120 120 app = csrf.OriginChecker(app, expected_origin,
121 121 skip_urls=[routes.util.url_for('api')])
122 122
123 123 # Establish the Registry for this application
124 124 app = RegistryManager(app)
125 125
126 126 app.config = config
127 127
128 128 return app
129 129
130 130
131 131 def make_pyramid_app(global_config, **settings):
132 132 """
133 133 Constructs the WSGI application based on Pyramid and wraps the Pylons based
134 134 application.
135 135
136 136 Specials:
137 137
138 138 * We migrate from Pylons to Pyramid. While doing this, we keep both
139 139 frameworks functional. This involves moving some WSGI middlewares around
140 140 and providing access to some data internals, so that the old code is
141 141 still functional.
142 142
143 143 * The application can also be integrated like a plugin via the call to
144 144 `includeme`. This is accompanied with the other utility functions which
145 145 are called. Changing this should be done with great care to not break
146 146 cases when these fragments are assembled from another place.
147 147
148 148 """
149 149 # The edition string should be available in pylons too, so we add it here
150 150 # before copying the settings.
151 151 settings.setdefault('rhodecode.edition', 'Community Edition')
152 152
153 153 # As long as our Pylons application does expect "unprepared" settings, make
154 154 # sure that we keep an unmodified copy. This avoids unintentional change of
155 155 # behavior in the old application.
156 156 settings_pylons = settings.copy()
157 157
158 158 sanitize_settings_and_apply_defaults(settings)
159 159 config = Configurator(settings=settings)
160 160 add_pylons_compat_data(config.registry, global_config, settings_pylons)
161 161
162 162 load_pyramid_environment(global_config, settings)
163 163
164 164 includeme_first(config)
165 165 includeme(config)
166 166 pyramid_app = config.make_wsgi_app()
167 167 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
168 168 pyramid_app.config = config
169 169
170 170 # creating the app uses a connection - return it after we are done
171 171 meta.Session.remove()
172 172
173 173 return pyramid_app
174 174
175 175
176 176 def make_not_found_view(config):
177 177 """
178 178 This creates the view which should be registered as not-found-view to
179 179 pyramid. Basically it consists of the old pylons app, converted to a view.
180 180 Additionally it is wrapped by some other middlewares.
181 181 """
182 182 settings = config.registry.settings
183 183 vcs_server_enabled = settings['vcs.server.enable']
184 184
185 185 # Make pylons app from unprepared settings.
186 186 pylons_app = make_app(
187 187 config.registry._pylons_compat_global_config,
188 188 **config.registry._pylons_compat_settings)
189 189 config.registry._pylons_compat_config = pylons_app.config
190 190
191 191 # Appenlight monitoring.
192 192 pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
193 193 pylons_app, settings)
194 194
195 195 # The pylons app is executed inside of the pyramid 404 exception handler.
196 196 # Exceptions which are raised inside of it are not handled by pyramid
197 197 # again. Therefore we add a middleware that invokes the error handler in
198 198 # case of an exception or error response. This way we return proper error
199 199 # HTML pages in case of an error.
200 200 reraise = (settings.get('debugtoolbar.enabled', False) or
201 201 rhodecode.disable_error_handler)
202 202 pylons_app = PylonsErrorHandlingMiddleware(
203 203 pylons_app, error_handler, reraise)
204 204
205 205 # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
206 206 # view to handle the request. Therefore it is wrapped around the pylons
207 207 # app. It has to be outside of the error handling otherwise error responses
208 208 # from the vcsserver are converted to HTML error pages. This confuses the
209 209 # command line tools and the user won't get a meaningful error message.
210 210 if vcs_server_enabled:
211 211 pylons_app = VCSMiddleware(
212 212 pylons_app, settings, appenlight_client, registry=config.registry)
213 213
214 214 # Convert WSGI app to pyramid view and return it.
215 215 return wsgiapp(pylons_app)
216 216
217 217
218 218 def add_pylons_compat_data(registry, global_config, settings):
219 219 """
220 220 Attach data to the registry to support the Pylons integration.
221 221 """
222 222 registry._pylons_compat_global_config = global_config
223 223 registry._pylons_compat_settings = settings
224 224
225 225
226 226 def error_handler(exception, request):
227 227 import rhodecode
228 228 from rhodecode.lib import helpers
229 229
230 230 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
231 231
232 232 base_response = HTTPInternalServerError()
233 233 # prefer original exception for the response since it may have headers set
234 234 if isinstance(exception, HTTPException):
235 235 base_response = exception
236 236 elif isinstance(exception, VCSCommunicationError):
237 237 base_response = VCSServerUnavailable()
238 238
239 239 def is_http_error(response):
240 240 # error which should have traceback
241 241 return response.status_code > 499
242 242
243 243 if is_http_error(base_response):
244 244 log.exception(
245 245 'error occurred handling this request for path: %s', request.path)
246 246
247 247 c = AttributeDict()
248 248 c.error_message = base_response.status
249 249 c.error_explanation = base_response.explanation or str(base_response)
250 250 c.visual = AttributeDict()
251 251
252 252 c.visual.rhodecode_support_url = (
253 253 request.registry.settings.get('rhodecode_support_url') or
254 254 request.route_url('rhodecode_support')
255 255 )
256 256 c.redirect_time = 0
257 257 c.rhodecode_name = rhodecode_title
258 258 if not c.rhodecode_name:
259 259 c.rhodecode_name = 'Rhodecode'
260 260
261 261 c.causes = []
262 262 if hasattr(base_response, 'causes'):
263 263 c.causes = base_response.causes
264 264 c.messages = helpers.flash.pop_messages()
265 265
266 266 response = render_to_response(
267 267 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
268 268 response=base_response)
269 269
270 270 return response
271 271
272 272
273 273 def includeme(config):
274 274 settings = config.registry.settings
275 275
276 276 # plugin information
277 277 config.registry.rhodecode_plugins = OrderedDict()
278 278
279 279 config.add_directive(
280 280 'register_rhodecode_plugin', register_rhodecode_plugin)
281 281
282 282 if asbool(settings.get('appenlight', 'false')):
283 283 config.include('appenlight_client.ext.pyramid_tween')
284 284
285 285 # Includes which are required. The application would fail without them.
286 286 config.include('pyramid_mako')
287 287 config.include('pyramid_beaker')
288 288
289 289 config.include('rhodecode.authentication')
290 290 config.include('rhodecode.integrations')
291 291
292 292 # apps
293 293 config.include('rhodecode.apps._base')
294 294 config.include('rhodecode.apps.ops')
295 295
296 296 config.include('rhodecode.apps.admin')
297 297 config.include('rhodecode.apps.channelstream')
298 298 config.include('rhodecode.apps.login')
299 299 config.include('rhodecode.apps.home')
300 300 config.include('rhodecode.apps.repository')
301 301 config.include('rhodecode.apps.repo_group')
302 302 config.include('rhodecode.apps.search')
303 303 config.include('rhodecode.apps.user_profile')
304 304 config.include('rhodecode.apps.my_account')
305 305 config.include('rhodecode.apps.svn_support')
306 306
307 307 config.include('rhodecode.tweens')
308 308 config.include('rhodecode.api')
309 309
310 310 config.add_route(
311 311 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
312 312
313 313 config.add_translation_dirs('rhodecode:i18n/')
314 314 settings['default_locale_name'] = settings.get('lang', 'en')
315 315
316 316 # Add subscribers.
317 317 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
318 318 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
319 319 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
320 320
321 # events
322 # TODO(marcink): this should be done when pyramid migration is finished
323 # config.add_subscriber(
324 # 'rhodecode.integrations.integrations_event_handler',
325 # 'rhodecode.events.RhodecodeEvent')
326
321 327 # Set the authorization policy.
322 328 authz_policy = ACLAuthorizationPolicy()
323 329 config.set_authorization_policy(authz_policy)
324 330
325 331 # Set the default renderer for HTML templates to mako.
326 332 config.add_mako_renderer('.html')
327 333
328 334 config.add_renderer(
329 335 name='json_ext',
330 336 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
331 337
332 338 # include RhodeCode plugins
333 339 includes = aslist(settings.get('rhodecode.includes', []))
334 340 for inc in includes:
335 341 config.include(inc)
336 342
337 343 # This is the glue which allows us to migrate in chunks. By registering the
338 344 # pylons based application as the "Not Found" view in Pyramid, we will
339 345 # fallback to the old application each time the new one does not yet know
340 346 # how to handle a request.
341 347 config.add_notfound_view(make_not_found_view(config))
342 348
343 349 if not settings.get('debugtoolbar.enabled', False):
344 350 # if no toolbar, then any exception gets caught and rendered
345 351 config.add_view(error_handler, context=Exception)
346 352
347 353 config.add_view(error_handler, context=HTTPError)
348 354
349 355
350 356 def includeme_first(config):
351 357 # redirect automatic browser favicon.ico requests to correct place
352 358 def favicon_redirect(context, request):
353 359 return HTTPFound(
354 360 request.static_path('rhodecode:public/images/favicon.ico'))
355 361
356 362 config.add_view(favicon_redirect, route_name='favicon')
357 363 config.add_route('favicon', '/favicon.ico')
358 364
359 365 def robots_redirect(context, request):
360 366 return HTTPFound(
361 367 request.static_path('rhodecode:public/robots.txt'))
362 368
363 369 config.add_view(robots_redirect, route_name='robots')
364 370 config.add_route('robots', '/robots.txt')
365 371
366 372 config.add_static_view(
367 373 '_static/deform', 'deform:static')
368 374 config.add_static_view(
369 375 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
370 376
371 377
372 378 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
373 379 """
374 380 Apply outer WSGI middlewares around the application.
375 381
376 382 Part of this has been moved up from the Pylons layer, so that the
377 383 data is also available if old Pylons code is hit through an already ported
378 384 view.
379 385 """
380 386 settings = config.registry.settings
381 387
382 388 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
383 389 pyramid_app = HttpsFixup(pyramid_app, settings)
384 390
385 391 # Add RoutesMiddleware to support the pylons compatibility tween during
386 392 # migration to pyramid.
387 393 pyramid_app = SkippableRoutesMiddleware(
388 394 pyramid_app, config.registry._pylons_compat_config['routes.map'],
389 395 skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar'))
390 396
391 397 pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings)
392 398
393 399 if settings['gzip_responses']:
394 400 pyramid_app = make_gzip_middleware(
395 401 pyramid_app, settings, compress_level=1)
396 402
397 403 # this should be the outer most middleware in the wsgi stack since
398 404 # middleware like Routes make database calls
399 405 def pyramid_app_with_cleanup(environ, start_response):
400 406 try:
401 407 return pyramid_app(environ, start_response)
402 408 finally:
403 409 # Dispose current database session and rollback uncommitted
404 410 # transactions.
405 411 meta.Session.remove()
406 412
407 413 # In a single threaded mode server, on non sqlite db we should have
408 414 # '0 Current Checked out connections' at the end of a request,
409 415 # if not, then something, somewhere is leaving a connection open
410 416 pool = meta.Base.metadata.bind.engine.pool
411 417 log.debug('sa pool status: %s', pool.status())
412 418
413 419 return pyramid_app_with_cleanup
414 420
415 421
416 422 def sanitize_settings_and_apply_defaults(settings):
417 423 """
418 424 Applies settings defaults and does all type conversion.
419 425
420 426 We would move all settings parsing and preparation into this place, so that
421 427 we have only one place left which deals with this part. The remaining parts
422 428 of the application would start to rely fully on well prepared settings.
423 429
424 430 This piece would later be split up per topic to avoid a big fat monster
425 431 function.
426 432 """
427 433
428 434 # Pyramid's mako renderer has to search in the templates folder so that the
429 435 # old templates still work. Ported and new templates are expected to use
430 436 # real asset specifications for the includes.
431 437 mako_directories = settings.setdefault('mako.directories', [
432 438 # Base templates of the original Pylons application
433 439 'rhodecode:templates',
434 440 ])
435 441 log.debug(
436 442 "Using the following Mako template directories: %s",
437 443 mako_directories)
438 444
439 445 # Default includes, possible to change as a user
440 446 pyramid_includes = settings.setdefault('pyramid.includes', [
441 447 'rhodecode.lib.middleware.request_wrapper',
442 448 ])
443 449 log.debug(
444 450 "Using the following pyramid.includes: %s",
445 451 pyramid_includes)
446 452
447 453 # TODO: johbo: Re-think this, usually the call to config.include
448 454 # should allow to pass in a prefix.
449 455 settings.setdefault('rhodecode.api.url', '/_admin/api')
450 456
451 457 # Sanitize generic settings.
452 458 _list_setting(settings, 'default_encoding', 'UTF-8')
453 459 _bool_setting(settings, 'is_test', 'false')
454 460 _bool_setting(settings, 'gzip_responses', 'false')
455 461
456 462 # Call split out functions that sanitize settings for each topic.
457 463 _sanitize_appenlight_settings(settings)
458 464 _sanitize_vcs_settings(settings)
459 465
460 466 return settings
461 467
462 468
463 469 def _sanitize_appenlight_settings(settings):
464 470 _bool_setting(settings, 'appenlight', 'false')
465 471
466 472
467 473 def _sanitize_vcs_settings(settings):
468 474 """
469 475 Applies settings defaults and does type conversion for all VCS related
470 476 settings.
471 477 """
472 478 _string_setting(settings, 'vcs.svn.compatible_version', '')
473 479 _string_setting(settings, 'git_rev_filter', '--all')
474 480 _string_setting(settings, 'vcs.hooks.protocol', 'http')
475 481 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
476 482 _string_setting(settings, 'vcs.server', '')
477 483 _string_setting(settings, 'vcs.server.log_level', 'debug')
478 484 _string_setting(settings, 'vcs.server.protocol', 'http')
479 485 _bool_setting(settings, 'startup.import_repos', 'false')
480 486 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
481 487 _bool_setting(settings, 'vcs.server.enable', 'true')
482 488 _bool_setting(settings, 'vcs.start_server', 'false')
483 489 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
484 490 _int_setting(settings, 'vcs.connection_timeout', 3600)
485 491
486 492 # Support legacy values of vcs.scm_app_implementation. Legacy
487 493 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
488 494 # which is now mapped to 'http'.
489 495 scm_app_impl = settings['vcs.scm_app_implementation']
490 496 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
491 497 settings['vcs.scm_app_implementation'] = 'http'
492 498
493 499
494 500 def _int_setting(settings, name, default):
495 501 settings[name] = int(settings.get(name, default))
496 502
497 503
498 504 def _bool_setting(settings, name, default):
499 505 input = settings.get(name, default)
500 506 if isinstance(input, unicode):
501 507 input = input.encode('utf8')
502 508 settings[name] = asbool(input)
503 509
504 510
505 511 def _list_setting(settings, name, default):
506 512 raw_value = settings.get(name, default)
507 513
508 514 old_separator = ','
509 515 if old_separator in raw_value:
510 516 # If we get a comma separated list, pass it to our own function.
511 517 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
512 518 else:
513 519 # Otherwise we assume it uses pyramids space/newline separation.
514 520 settings[name] = aslist(raw_value)
515 521
516 522
517 523 def _string_setting(settings, name, default, lower=True):
518 524 value = settings.get(name, default)
519 525 if lower:
520 526 value = value.lower()
521 527 settings[name] = value
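
The commented-out config.add_subscriber(...) block added above is the eventual goal: event delivery through Pyramid's registry instead of the direct call inside events.trigger. A generic sketch of that subscriber mechanism (illustrative names, not the actual RhodeCode wiring):

    from pyramid.config import Configurator

    class DummyEvent(object):
        """Stand-in for rhodecode.events.RhodecodeEvent."""

    def dummy_handler(event):
        # stand-in for rhodecode.integrations.integrations_event_handler
        print('handling %r' % event)

    config = Configurator()
    config.add_subscriber(dummy_handler, DummyEvent)
    config.commit()                       # apply pending configuration actions
    config.registry.notify(DummyEvent())  # dummy_handler is called synchronously
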
@@ -1,79 +1,82 b''
1 1 # Copyright (C) 2016-2017 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 from pyramid.threadlocal import get_current_registry
21 from rhodecode.events.base import RhodecodeEvent
22
21 23
22 24 log = logging.getLogger(__name__)
23 25
24 26
25 27 def trigger(event, registry=None):
26 28 """
27 29 Helper method to send an event. This wraps the pyramid logic to send an
28 30 event.
29 31 """
30 32 # For the first step we are using pyramids thread locals here. If the
31 33 # event mechanism works out as a good solution we should think about
32 34 # passing the registry as an argument to get rid of it.
33 35 registry = registry or get_current_registry()
34 36 registry.notify(event)
35 log.debug('event %s triggered', event)
37 log.debug('event %s triggered using registry %s', event, registry)
36 38
37 39 # Until we can work around the problem that VCS operations do not have a
38 40 # pyramid context to work with, we send the events to integrations directly
39 41
40 42 # Later it will be possible to use regular pyramid subscribers ie:
41 # config.add_subscriber(integrations_event_handler, RhodecodeEvent)
43 # config.add_subscriber(
44 # 'rhodecode.integrations.integrations_event_handler',
45 # 'rhodecode.events.RhodecodeEvent')
46 # trigger(event, request.registry)
47
42 48 from rhodecode.integrations import integrations_event_handler
43 49 if isinstance(event, RhodecodeEvent):
44 50 integrations_event_handler(event)
45 51
46
47 from rhodecode.events.base import RhodecodeEvent
48
49 52 from rhodecode.events.user import ( # noqa
50 53 UserPreCreate,
51 54 UserPostCreate,
52 55 UserPreUpdate,
53 56 UserRegistered
54 57 )
55 58
56 59 from rhodecode.events.repo import ( # noqa
57 60 RepoEvent,
58 61 RepoPreCreateEvent, RepoCreateEvent,
59 62 RepoPreDeleteEvent, RepoDeleteEvent,
60 63 RepoPrePushEvent, RepoPushEvent,
61 64 RepoPrePullEvent, RepoPullEvent,
62 65 )
63 66
64 67 from rhodecode.events.repo_group import ( # noqa
65 68 RepoGroupEvent,
66 69 RepoGroupCreateEvent,
67 70 RepoGroupUpdateEvent,
68 71 RepoGroupDeleteEvent,
69 72 )
70 73
71 74 from rhodecode.events.pullrequest import ( # noqa
72 75 PullRequestEvent,
73 76 PullRequestCreateEvent,
74 77 PullRequestUpdateEvent,
75 78 PullRequestCommentEvent,
76 79 PullRequestReviewEvent,
77 80 PullRequestMergeEvent,
78 81 PullRequestCloseEvent,
79 82 )
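
With the hint added in trigger() above, the intended calling convention is to hand it an explicit registry wherever a request is available. A hedged usage sketch (the view function is hypothetical; RepoCreateEvent(repo) follows the constructor shown later in this commit):

    from rhodecode import events

    def repo_created_hook(request, repo):          # hypothetical caller
        event = events.RepoCreateEvent(repo)       # repo: rhodecode.model.db.Repository
        events.trigger(event, request.registry)    # explicit registry, no thread locals
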
@@ -1,103 +1,102 b''
1 1 # Copyright (C) 2016-2017 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18 import logging
19 19
20 20 from datetime import datetime
21 21 from pyramid.threadlocal import get_current_request
22 22 from rhodecode.lib.utils2 import AttributeDict
23 23
24 24
25 25 # this is a user object to be used for events caused by the system (eg. shell)
26 26 SYSTEM_USER = AttributeDict(dict(
27 27 username='__SYSTEM__',
28 28 user_id='__SYSTEM_ID__'
29 29 ))
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 34 class RhodecodeEvent(object):
35 35 """
36 Base event class for all Rhodecode events
36 Base event class for all RhodeCode events
37 37 """
38 38 name = "RhodeCodeEvent"
39 39
40 def __init__(self):
41 self.request = get_current_request()
40 def __init__(self, request=None):
41 self.request = request or get_current_request()
42 42 self.utc_timestamp = datetime.utcnow()
43 43
44 44 @property
45 45 def auth_user(self):
46 46 if not self.request:
47 47 return
48 48
49 49 user = getattr(self.request, 'user', None)
50 50 if user:
51 51 return user
52 52
53 53 api_user = getattr(self.request, 'rpc_user', None)
54 54 if api_user:
55 55 return api_user
56 56
57 57 @property
58 58 def actor(self):
59 59 auth_user = self.auth_user
60 60
61 61 if auth_user:
62 62 instance = auth_user.get_instance()
63 63 if not instance:
64 64 return AttributeDict(dict(
65 65 username=auth_user.username,
66 66 user_id=auth_user.user_id,
67 67 ))
68 68 return instance
69 69
70 70 return SYSTEM_USER
71 71
72 72 @property
73 73 def actor_ip(self):
74 74 auth_user = self.auth_user
75 75 if auth_user:
76 76 return auth_user.ip_addr
77 77 return '<no ip available>'
78 78
79 79 @property
80 80 def server_url(self):
81 81 default = '<no server_url available>'
82 82 if self.request:
83 from rhodecode.lib import helpers as h
84 83 try:
85 return h.route_url('home')
84 return self.request.route_url('home')
86 85 except Exception:
87 86 log.exception('Failed to fetch URL for server')
88 87 return default
89 88
90 89 return default
91 90
92 91 def as_dict(self):
93 92 data = {
94 93 'name': self.name,
95 94 'utc_timestamp': self.utc_timestamp,
96 95 'actor_ip': self.actor_ip,
97 96 'actor': {
98 97 'username': self.actor.username,
99 98 'user_id': self.actor.user_id
100 99 },
101 100 'server_url': self.server_url
102 101 }
103 102 return data
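
The constructor now accepts an optional request, so an event built outside a normal web request (shell, tests, VCS hooks) can still resolve server_url via request.route_url('home') instead of the removed h.route_url helper. A minimal sketch, assuming a Pyramid request object is already in scope (the subclasses in this commit still call the base constructor without one and keep falling back to get_current_request()):

    from rhodecode.events.base import RhodecodeEvent

    event = RhodecodeEvent(request=request)   # explicit request instead of thread locals
    payload = event.as_dict()                 # name, utc_timestamp, actor, actor_ip, server_url
    print(payload['server_url'])              # resolved through request.route_url('home')
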
@@ -1,141 +1,142 b''
1 1 # Copyright (C) 2016-2017 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20
21 21 from rhodecode.translation import lazy_ugettext
22 22 from rhodecode.events.repo import (
23 23 RepoEvent, _commits_as_dict, _issues_as_dict)
24 24
25 25 log = logging.getLogger(__name__)
26 26
27 27
28 28 class PullRequestEvent(RepoEvent):
29 29 """
30 30 Base class for pull request events.
31 31
32 32 :param pullrequest: a :class:`PullRequest` instance
33 33 """
34 34
35 35 def __init__(self, pullrequest):
36 36 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
37 37 self.pullrequest = pullrequest
38 38
39 39 def as_dict(self):
40 40 from rhodecode.model.pull_request import PullRequestModel
41 41 data = super(PullRequestEvent, self).as_dict()
42 42
43 43 commits = _commits_as_dict(
44 self,
44 45 commit_ids=self.pullrequest.revisions,
45 46 repos=[self.pullrequest.source_repo]
46 47 )
47 48 issues = _issues_as_dict(commits)
48 49
49 50 data.update({
50 51 'pullrequest': {
51 52 'title': self.pullrequest.title,
52 53 'issues': issues,
53 54 'pull_request_id': self.pullrequest.pull_request_id,
54 55 'url': PullRequestModel().get_url(self.pullrequest),
55 56 'permalink_url': PullRequestModel().get_url(
56 57 self.pullrequest, permalink=True),
57 58 'status': self.pullrequest.calculated_review_status(),
58 59 'commits': commits,
59 60 }
60 61 })
61 62 return data
62 63
63 64
64 65 class PullRequestCreateEvent(PullRequestEvent):
65 66 """
66 67 An instance of this class is emitted as an :term:`event` after a pull
67 68 request is created.
68 69 """
69 70 name = 'pullrequest-create'
70 71 display_name = lazy_ugettext('pullrequest created')
71 72
72 73
73 74 class PullRequestCloseEvent(PullRequestEvent):
74 75 """
75 76 An instance of this class is emitted as an :term:`event` after a pull
76 77 request is closed.
77 78 """
78 79 name = 'pullrequest-close'
79 80 display_name = lazy_ugettext('pullrequest closed')
80 81
81 82
82 83 class PullRequestUpdateEvent(PullRequestEvent):
83 84 """
84 85 An instance of this class is emitted as an :term:`event` after a pull
85 86 request's commits have been updated.
86 87 """
87 88 name = 'pullrequest-update'
88 89 display_name = lazy_ugettext('pullrequest commits updated')
89 90
90 91
91 92 class PullRequestReviewEvent(PullRequestEvent):
92 93 """
93 94 An instance of this class is emitted as an :term:`event` after a pull
94 95 request review has changed.
95 96 """
96 97 name = 'pullrequest-review'
97 98 display_name = lazy_ugettext('pullrequest review changed')
98 99
99 100
100 101 class PullRequestMergeEvent(PullRequestEvent):
101 102 """
102 103 An instance of this class is emitted as an :term:`event` after a pull
103 104 request is merged.
104 105 """
105 106 name = 'pullrequest-merge'
106 107 display_name = lazy_ugettext('pullrequest merged')
107 108
108 109
109 110 class PullRequestCommentEvent(PullRequestEvent):
110 111 """
111 112 An instance of this class is emitted as an :term:`event` after a pull
112 113 request comment is created.
113 114 """
114 115 name = 'pullrequest-comment'
115 116 display_name = lazy_ugettext('pullrequest commented')
116 117
117 118 def __init__(self, pullrequest, comment):
118 119 super(PullRequestCommentEvent, self).__init__(pullrequest)
119 120 self.comment = comment
120 121
121 122 def as_dict(self):
122 123 from rhodecode.model.comment import CommentsModel
123 124 data = super(PullRequestCommentEvent, self).as_dict()
124 125
125 126 status = None
126 127 if self.comment.status_change:
127 128 status = self.comment.status_change[0].status
128 129
129 130 data.update({
130 131 'comment': {
131 132 'status': status,
132 133 'text': self.comment.text,
133 134 'type': self.comment.comment_type,
134 135 'file': self.comment.f_path,
135 136 'line': self.comment.line_no,
136 137 'url': CommentsModel().get_url(self.comment),
137 138 'permalink_url': CommentsModel().get_url(
138 139 self.comment, permalink=True),
139 140 }
140 141 })
141 142 return data
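
For reference, the rough shape of PullRequestEvent.as_dict() after this change, with the repo block coming from RepoEvent and the top-level keys from RhodecodeEvent (all values below are invented for illustration):

    example_payload = {
        'name': 'pullrequest-create',
        'utc_timestamp': '2017-05-02T10:15:00',
        'actor': {'username': 'admin', 'user_id': 2},
        'actor_ip': '10.0.0.5',
        'server_url': 'https://code.example.com',
        'repo': {
            'repo_id': 1,
            'repo_name': 'some/repo',
            'repo_type': 'git',
            'url': 'https://code.example.com/some/repo',
            'permalink_url': 'https://code.example.com/_1',
            'extra_fields': {},
        },
        'pullrequest': {
            'pull_request_id': 42,
            'title': 'example pull request',
            'issues': {},
            'url': 'https://code.example.com/some/repo/pull-request/42',
            'permalink_url': 'https://code.example.com/_1/pull-request/42',
            'status': 'under_review',
            'commits': [],
        },
    }
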
@@ -1,274 +1,277 b''
1 1 # Copyright (C) 2016-2017 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import collections
20 20 import logging
21 21
22 22 from rhodecode.translation import lazy_ugettext
23 23 from rhodecode.model.db import User, Repository, Session
24 24 from rhodecode.events.base import RhodecodeEvent
25 25 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 def _commits_as_dict(commit_ids, repos):
30 def _commits_as_dict(event, commit_ids, repos):
31 31 """
32 32 Helper function to serialize commit_ids
33 33
34 :param event: class calling this method
34 35 :param commit_ids: commits to get
35 36 :param repos: list of repos to check
36 37 """
37 38 from rhodecode.lib.utils2 import extract_mentioned_users
38 39 from rhodecode.lib.helpers import (
39 40 urlify_commit_message, process_patterns, chop_at_smart)
40 41 from rhodecode.model.repo import RepoModel
41 42
42 43 if not repos:
43 44 raise Exception('no repo defined')
44 45
45 46 if not isinstance(repos, (tuple, list)):
46 47 repos = [repos]
47 48
48 49 if not commit_ids:
49 50 return []
50 51
51 52 needed_commits = list(commit_ids)
52 53
53 54 commits = []
54 55 reviewers = []
55 56 for repo in repos:
56 57 if not needed_commits:
57 58 return commits # return early if we have the commits we need
58 59
59 60 vcs_repo = repo.scm_instance(cache=False)
60 61 try:
61 62 # use copy of needed_commits since we modify it while iterating
62 63 for commit_id in list(needed_commits):
63 64 try:
64 65 cs = vcs_repo.get_changeset(commit_id)
65 66 except CommitDoesNotExistError:
66 67 continue # maybe its in next repo
67 68
68 69 cs_data = cs.__json__()
69 70 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
70 71 cs_data['reviewers'] = reviewers
71 72 cs_data['url'] = RepoModel().get_commit_url(
72 repo, cs_data['raw_id'])
73 repo, cs_data['raw_id'], request=event.request)
73 74 cs_data['permalink_url'] = RepoModel().get_commit_url(
74 repo, cs_data['raw_id'], permalink=True)
75 repo, cs_data['raw_id'], request=event.request, permalink=True)
75 76 urlified_message, issues_data = process_patterns(
76 77 cs_data['message'], repo.repo_name)
77 78 cs_data['issues'] = issues_data
78 79 cs_data['message_html'] = urlify_commit_message(
79 80 cs_data['message'], repo.repo_name)
80 81 cs_data['message_html_title'] = chop_at_smart(
81 82 cs_data['message'], '\n', suffix_if_chopped='...')
82 83 commits.append(cs_data)
83 84
84 85 needed_commits.remove(commit_id)
85 86
86 87 except Exception as e:
87 88 log.exception(e)
88 89 # we don't send any commits when crash happens, only full list
89 90 # matters we short circuit then.
90 91 return []
91 92
92 93 missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits)
93 94 if missing_commits:
94 95 log.error('missing commits: %s' % ', '.join(missing_commits))
95 96
96 97 return commits
97 98
98 99
99 100 def _issues_as_dict(commits):
100 101 """ Helper function to serialize issues from commits """
101 102 issues = {}
102 103 for commit in commits:
103 104 for issue in commit['issues']:
104 105 issues[issue['id']] = issue
105 106 return issues
106 107
107 108
108 109 class RepoEvent(RhodecodeEvent):
109 110 """
110 111 Base class for events acting on a repository.
111 112
112 113 :param repo: a :class:`Repository` instance
113 114 """
114 115
115 116 def __init__(self, repo):
116 117 super(RepoEvent, self).__init__()
117 118 self.repo = repo
118 119
119 120 def as_dict(self):
120 121 from rhodecode.model.repo import RepoModel
121 122 data = super(RepoEvent, self).as_dict()
122 123 extra_fields = collections.OrderedDict()
123 124 for field in self.repo.extra_fields:
124 125 extra_fields[field.field_key] = field.field_value
125 126
126 127 data.update({
127 128 'repo': {
128 129 'repo_id': self.repo.repo_id,
129 130 'repo_name': self.repo.repo_name,
130 131 'repo_type': self.repo.repo_type,
131 'url': RepoModel().get_url(self.repo),
132 'permalink_url': RepoModel().get_url(self.repo, permalink=True),
132 'url': RepoModel().get_url(
133 self.repo, request=self.request),
134 'permalink_url': RepoModel().get_url(
135 self.repo, request=self.request, permalink=True),
133 136 'extra_fields': extra_fields
134 137 }
135 138 })
136 139 return data
137 140
138 141
139 142 class RepoPreCreateEvent(RepoEvent):
140 143 """
141 144 An instance of this class is emitted as an :term:`event` before a repo is
142 145 created.
143 146 """
144 147 name = 'repo-pre-create'
145 148 display_name = lazy_ugettext('repository pre create')
146 149
147 150
148 151 class RepoCreateEvent(RepoEvent):
149 152 """
150 153 An instance of this class is emitted as an :term:`event` whenever a repo is
151 154 created.
152 155 """
153 156 name = 'repo-create'
154 157 display_name = lazy_ugettext('repository created')
155 158
156 159
157 160 class RepoPreDeleteEvent(RepoEvent):
158 161 """
159 162 An instance of this class is emitted as an :term:`event` whenever a repo is
160 163 created.
161 164 """
162 165 name = 'repo-pre-delete'
163 166 display_name = lazy_ugettext('repository pre delete')
164 167
165 168
166 169 class RepoDeleteEvent(RepoEvent):
167 170 """
168 171 An instance of this class is emitted as an :term:`event` whenever a repo is
169 172 deleted.
170 173 """
171 174 name = 'repo-delete'
172 175 display_name = lazy_ugettext('repository deleted')
173 176
174 177
175 178 class RepoVCSEvent(RepoEvent):
176 179 """
177 180 Base class for events triggered by the VCS
178 181 """
179 182 def __init__(self, repo_name, extras):
180 183 self.repo = Repository.get_by_repo_name(repo_name)
181 184 if not self.repo:
182 185 raise Exception('repo by this name %s does not exist' % repo_name)
183 186 self.extras = extras
184 187 super(RepoVCSEvent, self).__init__(self.repo)
185 188
186 189 @property
187 190 def actor(self):
188 191 if self.extras.get('username'):
189 192 return User.get_by_username(self.extras['username'])
190 193
191 194 @property
192 195 def actor_ip(self):
193 196 if self.extras.get('ip'):
194 197 return self.extras['ip']
195 198
196 199 @property
197 200 def server_url(self):
198 201 if self.extras.get('server_url'):
199 202 return self.extras['server_url']
200 203
201 204
202 205 class RepoPrePullEvent(RepoVCSEvent):
203 206 """
204 207 An instance of this class is emitted as an :term:`event` before commits
205 208 are pulled from a repo.
206 209 """
207 210 name = 'repo-pre-pull'
208 211 display_name = lazy_ugettext('repository pre pull')
209 212
210 213
211 214 class RepoPullEvent(RepoVCSEvent):
212 215 """
213 216 An instance of this class is emitted as an :term:`event` after commits
214 217 are pulled from a repo.
215 218 """
216 219 name = 'repo-pull'
217 220 display_name = lazy_ugettext('repository pull')
218 221
219 222
220 223 class RepoPrePushEvent(RepoVCSEvent):
221 224 """
222 225 An instance of this class is emitted as an :term:`event` before commits
223 226 are pushed to a repo.
224 227 """
225 228 name = 'repo-pre-push'
226 229 display_name = lazy_ugettext('repository pre push')
227 230
228 231
229 232 class RepoPushEvent(RepoVCSEvent):
230 233 """
231 234 An instance of this class is emitted as an :term:`event` after commits
232 235 are pushed to a repo.
233 236
234 237 :param extras: (optional) dict of data from proxied VCS actions
235 238 """
236 239 name = 'repo-push'
237 240 display_name = lazy_ugettext('repository push')
238 241
239 242 def __init__(self, repo_name, pushed_commit_ids, extras):
240 243 super(RepoPushEvent, self).__init__(repo_name, extras)
241 244 self.pushed_commit_ids = pushed_commit_ids
242 245
243 246 def as_dict(self):
244 247 data = super(RepoPushEvent, self).as_dict()
245 248
246 249 def branch_url(branch_name):
247 250 return '{}/changelog?branch={}'.format(
248 251 data['repo']['url'], branch_name)
249 252
250 253 commits = _commits_as_dict(
251 commit_ids=self.pushed_commit_ids, repos=[self.repo])
254 self, commit_ids=self.pushed_commit_ids, repos=[self.repo])
252 255
253 256 last_branch = None
254 257 for commit in reversed(commits):
255 258 commit['branch'] = commit['branch'] or last_branch
256 259 last_branch = commit['branch']
257 260 issues = _issues_as_dict(commits)
258 261
259 262 branches = set(
260 263 commit['branch'] for commit in commits if commit['branch'])
261 264 branches = [
262 265 {
263 266 'name': branch,
264 267 'url': branch_url(branch)
265 268 }
266 269 for branch in branches
267 270 ]
268 271
269 272 data['push'] = {
270 273 'commits': commits,
271 274 'issues': issues,
272 275 'branches': branches,
273 276 }
274 277 return data
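
_commits_as_dict now takes the calling event as its first argument so commit URLs are built from event.request; RepoVCSEvent subclasses still read actor, IP and server URL from the hook extras dict. A sketch of constructing a push event by hand (values invented; real hooks pass a richer extras mapping and the repository must exist in the database):

    from rhodecode.events import RepoPushEvent, trigger

    extras = {                                  # only the keys read by RepoVCSEvent above
        'username': 'admin',
        'ip': '10.0.0.5',
        'server_url': 'https://code.example.com',
    }
    event = RepoPushEvent(
        repo_name='some/repo',
        pushed_commit_ids=['f' * 40],           # full 40-char commit hashes
        extras=extras)
    trigger(event)                              # falls back to the thread-local registry
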
@@ -1,213 +1,216 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Model for integrations
24 24 """
25 25
26 26
27 27 import logging
28 28
29 29 from sqlalchemy import or_, and_
30 30
31 31 import rhodecode
32 32 from rhodecode import events
33 33 from rhodecode.lib.caching_query import FromCache
34 34 from rhodecode.model import BaseModel
35 35 from rhodecode.model.db import Integration, Repository, RepoGroup
36 36 from rhodecode.integrations import integration_type_registry
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 class IntegrationModel(BaseModel):
42 42
43 43 cls = Integration
44 44
45 45 def __get_integration(self, integration):
46 46 if isinstance(integration, Integration):
47 47 return integration
48 48 elif isinstance(integration, (int, long)):
49 49 return self.sa.query(Integration).get(integration)
50 50 else:
51 51 if integration:
52 52 raise Exception('integration must be int, long or Instance'
53 53 ' of Integration got %s' % type(integration))
54 54
55 55 def create(self, IntegrationType, name, enabled, repo, repo_group,
56 56 child_repos_only, settings):
57 57 """ Create an IntegrationType integration """
58 58 integration = Integration()
59 59 integration.integration_type = IntegrationType.key
60 60 self.sa.add(integration)
61 61 self.update_integration(integration, name, enabled, repo, repo_group,
62 62 child_repos_only, settings)
63 63 self.sa.commit()
64 64 return integration
65 65
66 66 def update_integration(self, integration, name, enabled, repo, repo_group,
67 67 child_repos_only, settings):
68 68 integration = self.__get_integration(integration)
69 69
70 70 integration.repo = repo
71 71 integration.repo_group = repo_group
72 72 integration.child_repos_only = child_repos_only
73 73 integration.name = name
74 74 integration.enabled = enabled
75 75 integration.settings = settings
76 76
77 77 return integration
78 78
79 79 def delete(self, integration):
80 80 integration = self.__get_integration(integration)
81 81 if integration:
82 82 self.sa.delete(integration)
83 83 return True
84 84 return False
85 85
86 86 def get_integration_handler(self, integration):
87 87 TypeClass = integration_type_registry.get(integration.integration_type)
88 88 if not TypeClass:
89 89 log.error('No class could be found for integration type: {}'.format(
90 90 integration.integration_type))
91 91 return None
92 92
93 93 return TypeClass(integration.settings)
94 94
95 95 def send_event(self, integration, event):
96 96 """ Send an event to an integration """
97 97 handler = self.get_integration_handler(integration)
98 98 if handler:
99 log.debug(
100 'events: sending event %s on integration %s using handler %s',
101 event, integration, handler)
99 102 handler.send_event(event)
100 103
101 104 def get_integrations(self, scope, IntegrationType=None):
102 105 """
103 106 Return integrations for a scope, which must be one of:
104 107
105 108 'all' - every integration, global/repogroup/repo
106 109 'global' - global integrations only
107 110 <Repository> instance - integrations for this repo only
108 111 <RepoGroup> instance - integrations for this repogroup only
109 112 """
110 113
111 114 if isinstance(scope, Repository):
112 115 query = self.sa.query(Integration).filter(
113 116 Integration.repo==scope)
114 117 elif isinstance(scope, RepoGroup):
115 118 query = self.sa.query(Integration).filter(
116 119 Integration.repo_group==scope)
117 120 elif scope == 'global':
118 121 # global integrations
119 122 query = self.sa.query(Integration).filter(
120 123 and_(Integration.repo_id==None, Integration.repo_group_id==None)
121 124 )
122 125 elif scope == 'root-repos':
123 126 query = self.sa.query(Integration).filter(
124 127 and_(Integration.repo_id==None,
125 128 Integration.repo_group_id==None,
126 129 Integration.child_repos_only==True)
127 130 )
128 131 elif scope == 'all':
129 132 query = self.sa.query(Integration)
130 133 else:
131 134 raise Exception(
132 135 "invalid `scope`, must be one of: "
133 136 "['global', 'all', <Repository>, <RepoGroup>]")
134 137
135 138 if IntegrationType is not None:
136 139 query = query.filter(
137 140 Integration.integration_type==IntegrationType.key)
138 141
139 142 result = []
140 143 for integration in query.all():
141 144 IntType = integration_type_registry.get(integration.integration_type)
142 145 result.append((IntType, integration))
143 146 return result
144 147
145 148 def get_for_event(self, event, cache=False):
146 149 """
147 150 Get integrations that match an event
148 151 """
149 152 query = self.sa.query(
150 153 Integration
151 154 ).filter(
152 155 Integration.enabled==True
153 156 )
154 157
155 158 global_integrations_filter = and_(
156 159 Integration.repo_id==None,
157 160 Integration.repo_group_id==None,
158 161 Integration.child_repos_only==False,
159 162 )
160 163
161 164 if isinstance(event, events.RepoEvent):
162 165 root_repos_integrations_filter = and_(
163 166 Integration.repo_id==None,
164 167 Integration.repo_group_id==None,
165 168 Integration.child_repos_only==True,
166 169 )
167 170
168 171 clauses = [
169 172 global_integrations_filter,
170 173 ]
171 174
172 175 # repo integrations
173 176 if event.repo.repo_id: # pre create events don't have a repo_id yet
174 177 clauses.append(
175 178 Integration.repo_id==event.repo.repo_id
176 179 )
177 180
178 181 if event.repo.group:
179 182 clauses.append(
180 183 and_(
181 184 Integration.repo_group_id==event.repo.group.group_id,
182 185 Integration.child_repos_only==True
183 186 )
184 187 )
185 188 # repo group cascade to kids
186 189 clauses.append(
187 190 and_(
188 191 Integration.repo_group_id.in_(
189 192 [group.group_id for group in
190 193 event.repo.groups_with_parents]
191 194 ),
192 195 Integration.child_repos_only==False
193 196 )
194 197 )
195 198
196 199
197 200 if not event.repo.group: # root repo
198 201 clauses.append(root_repos_integrations_filter)
199 202
200 203 query = query.filter(or_(*clauses))
201 204
202 205 if cache:
203 206 cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id
204 207 query = query.options(
205 208 FromCache("sql_cache_short", cache_key))
206 209 else: # only global integrations
207 210 query = query.filter(global_integrations_filter)
208 211 if cache:
209 212 query = query.options(
210 213 FromCache("sql_cache_short", "get_enabled_global_integrations"))
211 214
212 215 result = query.all()
213 216 return result No newline at end of file
@@ -1,1023 +1,1029 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime, timedelta
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 from rhodecode import events
37 37 from rhodecode.lib import helpers as h
38 38 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 39 from rhodecode.lib.caching_query import FromCache
40 40 from rhodecode.lib.exceptions import AttachedForksError
41 41 from rhodecode.lib.hooks_base import log_delete_repository
42 42 from rhodecode.lib.utils import make_db_config
43 43 from rhodecode.lib.utils2 import (
44 44 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 45 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 46 from rhodecode.lib.vcs.backends import get_backend
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (_hash_key,
49 49 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 50 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 51 RepoGroup, RepositoryField)
52 52
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class RepoModel(BaseModel):
60 60
61 61 cls = Repository
62 62
63 63 def _get_user_group(self, users_group):
64 64 return self._get_instance(UserGroup, users_group,
65 65 callback=UserGroup.get_by_group_name)
66 66
67 67 def _get_repo_group(self, repo_group):
68 68 return self._get_instance(RepoGroup, repo_group,
69 69 callback=RepoGroup.get_by_group_name)
70 70
71 71 def _create_default_perms(self, repository, private):
72 72 # create default permission
73 73 default = 'repository.read'
74 74 def_user = User.get_default_user()
75 75 for p in def_user.user_perms:
76 76 if p.permission.permission_name.startswith('repository.'):
77 77 default = p.permission.permission_name
78 78 break
79 79
80 80 default_perm = 'repository.none' if private else default
81 81
82 82 repo_to_perm = UserRepoToPerm()
83 83 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 84
85 85 repo_to_perm.repository = repository
86 86 repo_to_perm.user_id = def_user.user_id
87 87
88 88 return repo_to_perm
89 89
90 90 @LazyProperty
91 91 def repos_path(self):
92 92 """
93 93 Gets the repositories root path from database
94 94 """
95 95 settings_model = VcsSettingsModel(sa=self.sa)
96 96 return settings_model.get_repos_location()
97 97
98 98 def get(self, repo_id, cache=False):
99 99 repo = self.sa.query(Repository) \
100 100 .filter(Repository.repo_id == repo_id)
101 101
102 102 if cache:
103 103 repo = repo.options(
104 104 FromCache("sql_cache_short", "get_repo_%s" % repo_id))
105 105 return repo.scalar()
106 106
107 107 def get_repo(self, repository):
108 108 return self._get_repo(repository)
109 109
110 110 def get_by_repo_name(self, repo_name, cache=False):
111 111 repo = self.sa.query(Repository) \
112 112 .filter(Repository.repo_name == repo_name)
113 113
114 114 if cache:
115 115 name_key = _hash_key(repo_name)
116 116 repo = repo.options(
117 117 FromCache("sql_cache_short", "get_repo_%s" % name_key))
118 118 return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129         Extracts a repository by its numeric id taken from special urls.
130 130         An example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135
136 136 try:
137 137 _repo_id = self._extract_id_from_repo_name(repo_name)
138 138 if _repo_id:
139 139 return self.get(_repo_id)
140 140 except Exception:
141 141 log.exception('Failed to extract repo_name from URL')
142 142
143 143 return None
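        # Illustrative usage (the repo id 11 and the names below are assumptions,
        # not values from this module): a path starting with `_<id>` resolves
        # through the numeric repo_id, anything else yields None.
        #
        #   RepoModel().get_repo_by_id('_11/some-repo')    # -> Repository with repo_id == 11
        #   RepoModel().get_repo_by_id('group/some-repo')  # -> None, no `_<id>` prefix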
144 144
145 145 def get_repos_for_root(self, root, traverse=False):
146 146 if traverse:
147 147 like_expression = u'{}%'.format(safe_unicode(root))
148 148 repos = Repository.query().filter(
149 149 Repository.repo_name.like(like_expression)).all()
150 150 else:
151 151 if root and not isinstance(root, RepoGroup):
152 152 raise ValueError(
153 153 'Root must be an instance '
154 154                     'of RepoGroup, got: {} instead'.format(type(root)))
155 155 repos = Repository.query().filter(Repository.group == root).all()
156 156 return repos
157 157
158 158 def get_url(self, repo, request=None, permalink=False):
159 159 if not request:
160 160 request = get_current_request()
161 161
162 if not request:
163 return
164
162 165 if permalink:
163 166 return request.route_url(
164 167 'repo_summary', repo_name=safe_str(repo.repo_id))
165 168 else:
166 169 return request.route_url(
167 170 'repo_summary', repo_name=safe_str(repo.repo_name))
168 171
169 172 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
170 173 if not request:
171 174 request = get_current_request()
172 175
176 if not request:
177 return
178
173 179 if permalink:
174 180 return request.route_url(
175 181 'repo_commit', repo_name=safe_str(repo.repo_id),
176 182 commit_id=commit_id)
177 183
178 184 else:
179 185 return request.route_url(
180 186 'repo_commit', repo_name=safe_str(repo.repo_name),
181 187 commit_id=commit_id)
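        # Illustrative results (host and values are assumptions, `model` being a
        # RepoModel instance): permalink urls are keyed by the immutable repo_id,
        # regular urls by the current repo_name, e.g.
        #
        #   model.get_url(repo, permalink=True)  # summary url built from repo.repo_id
        #   model.get_url(repo)                  # e.g. https://example.invalid/group/my-repo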
182 188
183 189 @classmethod
184 190 def update_repoinfo(cls, repositories=None):
185 191 if not repositories:
186 192 repositories = Repository.getAll()
187 193 for repo in repositories:
188 194 repo.update_commit_cache()
189 195
190 196 def get_repos_as_dict(self, repo_list=None, admin=False,
191 197 super_user_actions=False):
192 198
193 199 from rhodecode.lib.utils import PartialRenderer
194 200 _render = PartialRenderer('data_table/_dt_elements.mako')
195 201 c = _render.c
196 202
197 203 def quick_menu(repo_name):
198 204 return _render('quick_menu', repo_name)
199 205
200 206 def repo_lnk(name, rtype, rstate, private, fork_of):
201 207 return _render('repo_name', name, rtype, rstate, private, fork_of,
202 208 short_name=not admin, admin=False)
203 209
204 210 def last_change(last_change):
205 211 if admin and isinstance(last_change, datetime) and not last_change.tzinfo:
206 212                 last_change = last_change + timedelta(
207 213                     seconds=(datetime.now() - datetime.utcnow()).seconds)
208 214 return _render("last_change", last_change)
209 215
210 216 def rss_lnk(repo_name):
211 217 return _render("rss", repo_name)
212 218
213 219 def atom_lnk(repo_name):
214 220 return _render("atom", repo_name)
215 221
216 222 def last_rev(repo_name, cs_cache):
217 223 return _render('revision', repo_name, cs_cache.get('revision'),
218 224 cs_cache.get('raw_id'), cs_cache.get('author'),
219 225 cs_cache.get('message'))
220 226
221 227 def desc(desc):
222 228 if c.visual.stylify_metatags:
223 229 desc = h.urlify_text(h.escaped_stylize(desc))
224 230 else:
225 231 desc = h.urlify_text(h.html_escape(desc))
226 232
227 233 return _render('repo_desc', desc)
228 234
229 235 def state(repo_state):
230 236 return _render("repo_state", repo_state)
231 237
232 238 def repo_actions(repo_name):
233 239 return _render('repo_actions', repo_name, super_user_actions)
234 240
235 241 def user_profile(username):
236 242 return _render('user_profile', username)
237 243
238 244 repos_data = []
239 245 for repo in repo_list:
240 246 cs_cache = repo.changeset_cache
241 247 row = {
242 248 "menu": quick_menu(repo.repo_name),
243 249
244 250 "name": repo_lnk(repo.repo_name, repo.repo_type,
245 251 repo.repo_state, repo.private, repo.fork),
246 252 "name_raw": repo.repo_name.lower(),
247 253
248 254 "last_change": last_change(repo.last_db_change),
249 255 "last_change_raw": datetime_to_time(repo.last_db_change),
250 256
251 257 "last_changeset": last_rev(repo.repo_name, cs_cache),
252 258 "last_changeset_raw": cs_cache.get('revision'),
253 259
254 260 "desc": desc(repo.description),
255 261 "owner": user_profile(repo.user.username),
256 262
257 263 "state": state(repo.repo_state),
258 264 "rss": rss_lnk(repo.repo_name),
259 265
260 266 "atom": atom_lnk(repo.repo_name),
261 267 }
262 268 if admin:
263 269 row.update({
264 270 "action": repo_actions(repo.repo_name),
265 271 })
266 272 repos_data.append(row)
267 273
268 274 return repos_data
269 275
270 276 def _get_defaults(self, repo_name):
271 277 """
272 278 Gets information about repository, and returns a dict for
273 279 usage in forms
274 280
275 281 :param repo_name:
276 282 """
277 283
278 284 repo_info = Repository.get_by_repo_name(repo_name)
279 285
280 286 if repo_info is None:
281 287 return None
282 288
283 289 defaults = repo_info.get_dict()
284 290 defaults['repo_name'] = repo_info.just_name
285 291
286 292 groups = repo_info.groups_with_parents
287 293 parent_group = groups[-1] if groups else None
288 294
289 295         # we use -1 as this is how we mark an empty group in HTML
290 296 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
291 297
292 298 keys_to_process = (
293 299 {'k': 'repo_type', 'strip': False},
294 300 {'k': 'repo_enable_downloads', 'strip': True},
295 301 {'k': 'repo_description', 'strip': True},
296 302 {'k': 'repo_enable_locking', 'strip': True},
297 303 {'k': 'repo_landing_rev', 'strip': True},
298 304 {'k': 'clone_uri', 'strip': False},
299 305 {'k': 'repo_private', 'strip': True},
300 306 {'k': 'repo_enable_statistics', 'strip': True}
301 307 )
302 308
303 309 for item in keys_to_process:
304 310 attr = item['k']
305 311 if item['strip']:
306 312 attr = remove_prefix(item['k'], 'repo_')
307 313
308 314 val = defaults[attr]
309 315 if item['k'] == 'repo_landing_rev':
310 316 val = ':'.join(defaults[attr])
311 317 defaults[item['k']] = val
312 318 if item['k'] == 'clone_uri':
313 319 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
314 320
315 321 # fill owner
316 322 if repo_info.user:
317 323 defaults.update({'user': repo_info.user.username})
318 324 else:
319 325 replacement_user = User.get_first_super_admin().username
320 326 defaults.update({'user': replacement_user})
321 327
322 328 return defaults
323 329
324 330 def update(self, repo, **kwargs):
325 331 try:
326 332 cur_repo = self._get_repo(repo)
327 333 source_repo_name = cur_repo.repo_name
328 334 if 'user' in kwargs:
329 335 cur_repo.user = User.get_by_username(kwargs['user'])
330 336
331 337 if 'repo_group' in kwargs:
332 338 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
333 339 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
334 340
335 341 update_keys = [
336 342 (1, 'repo_description'),
337 343 (1, 'repo_landing_rev'),
338 344 (1, 'repo_private'),
339 345 (1, 'repo_enable_downloads'),
340 346 (1, 'repo_enable_locking'),
341 347 (1, 'repo_enable_statistics'),
342 348 (0, 'clone_uri'),
343 349 (0, 'fork_id')
344 350 ]
345 351 for strip, k in update_keys:
346 352 if k in kwargs:
347 353 val = kwargs[k]
348 354 if strip:
349 355 k = remove_prefix(k, 'repo_')
350 356
351 357 setattr(cur_repo, k, val)
352 358
353 359 new_name = cur_repo.get_new_name(kwargs['repo_name'])
354 360 cur_repo.repo_name = new_name
355 361
356 362 # if private flag is set, reset default permission to NONE
357 363 if kwargs.get('repo_private'):
358 364 EMPTY_PERM = 'repository.none'
359 365 RepoModel().grant_user_permission(
360 366 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
361 367 )
362 368
363 369 # handle extra fields
364 370 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
365 371 kwargs):
366 372 k = RepositoryField.un_prefix_key(field)
367 373 ex_field = RepositoryField.get_by_key_name(
368 374 key=k, repo=cur_repo)
369 375 if ex_field:
370 376 ex_field.field_value = kwargs[field]
371 377 self.sa.add(ex_field)
372 378 self.sa.add(cur_repo)
373 379
374 380 if source_repo_name != new_name:
375 381 # rename repository
376 382 self._rename_filesystem_repo(
377 383 old=source_repo_name, new=new_name)
378 384
379 385 return cur_repo
380 386 except Exception:
381 387 log.error(traceback.format_exc())
382 388 raise
383 389
384 390 def _create_repo(self, repo_name, repo_type, description, owner,
385 391 private=False, clone_uri=None, repo_group=None,
386 392 landing_rev='rev:tip', fork_of=None,
387 393 copy_fork_permissions=False, enable_statistics=False,
388 394 enable_locking=False, enable_downloads=False,
389 395 copy_group_permissions=False,
390 396 state=Repository.STATE_PENDING):
391 397 """
392 398         Create a repository inside the database with PENDING state. This should
393 399         only be executed by the create() method, with the exception of importing
394 400         existing repos
395 401 """
396 402 from rhodecode.model.scm import ScmModel
397 403
398 404 owner = self._get_user(owner)
399 405 fork_of = self._get_repo(fork_of)
400 406 repo_group = self._get_repo_group(safe_int(repo_group))
401 407
402 408 try:
403 409 repo_name = safe_unicode(repo_name)
404 410 description = safe_unicode(description)
405 411             # repo_name is just the name of the repository,
406 412             # while repo_name_full is the fully qualified name that combines
407 413             # the name with the path of the group
408 414 repo_name_full = repo_name
409 415 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
410 416
411 417 new_repo = Repository()
412 418 new_repo.repo_state = state
413 419 new_repo.enable_statistics = False
414 420 new_repo.repo_name = repo_name_full
415 421 new_repo.repo_type = repo_type
416 422 new_repo.user = owner
417 423 new_repo.group = repo_group
418 424 new_repo.description = description or repo_name
419 425 new_repo.private = private
420 426 new_repo.clone_uri = clone_uri
421 427 new_repo.landing_rev = landing_rev
422 428
423 429 new_repo.enable_statistics = enable_statistics
424 430 new_repo.enable_locking = enable_locking
425 431 new_repo.enable_downloads = enable_downloads
426 432
427 433 if repo_group:
428 434 new_repo.enable_locking = repo_group.enable_locking
429 435
430 436 if fork_of:
431 437 parent_repo = fork_of
432 438 new_repo.fork = parent_repo
433 439
434 440 events.trigger(events.RepoPreCreateEvent(new_repo))
435 441
436 442 self.sa.add(new_repo)
437 443
438 444 EMPTY_PERM = 'repository.none'
439 445 if fork_of and copy_fork_permissions:
440 446 repo = fork_of
441 447 user_perms = UserRepoToPerm.query() \
442 448 .filter(UserRepoToPerm.repository == repo).all()
443 449 group_perms = UserGroupRepoToPerm.query() \
444 450 .filter(UserGroupRepoToPerm.repository == repo).all()
445 451
446 452 for perm in user_perms:
447 453 UserRepoToPerm.create(
448 454 perm.user, new_repo, perm.permission)
449 455
450 456 for perm in group_perms:
451 457 UserGroupRepoToPerm.create(
452 458 perm.users_group, new_repo, perm.permission)
453 459 # in case we copy permissions and also set this repo to private
454 460 # override the default user permission to make it a private
455 461 # repo
456 462 if private:
457 463 RepoModel(self.sa).grant_user_permission(
458 464 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
459 465
460 466 elif repo_group and copy_group_permissions:
461 467 user_perms = UserRepoGroupToPerm.query() \
462 468 .filter(UserRepoGroupToPerm.group == repo_group).all()
463 469
464 470 group_perms = UserGroupRepoGroupToPerm.query() \
465 471 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
466 472
467 473 for perm in user_perms:
468 474 perm_name = perm.permission.permission_name.replace(
469 475 'group.', 'repository.')
470 476 perm_obj = Permission.get_by_key(perm_name)
471 477 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
472 478
473 479 for perm in group_perms:
474 480 perm_name = perm.permission.permission_name.replace(
475 481 'group.', 'repository.')
476 482 perm_obj = Permission.get_by_key(perm_name)
477 483 UserGroupRepoToPerm.create(
478 484 perm.users_group, new_repo, perm_obj)
479 485
480 486 if private:
481 487 RepoModel(self.sa).grant_user_permission(
482 488 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
483 489
484 490 else:
485 491 perm_obj = self._create_default_perms(new_repo, private)
486 492 self.sa.add(perm_obj)
487 493
488 494 # now automatically start following this repository as owner
489 495 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
490 496 owner.user_id)
491 497
492 498             # we need to flush here in order to check that the database won't
493 499             # throw any exceptions; filesystem dirs are created at the very end
494 500 self.sa.flush()
495 501 events.trigger(events.RepoCreateEvent(new_repo))
496 502 return new_repo
497 503
498 504 except Exception:
499 505 log.error(traceback.format_exc())
500 506 raise
501 507
502 508 def create(self, form_data, cur_user):
503 509 """
504 510 Create repository using celery tasks
505 511
506 512 :param form_data:
507 513 :param cur_user:
508 514 """
509 515 from rhodecode.lib.celerylib import tasks, run_task
510 516 return run_task(tasks.create_repo, form_data, cur_user)
511 517
512 518 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
513 519 perm_deletions=None, check_perms=True,
514 520 cur_user=None):
515 521 if not perm_additions:
516 522 perm_additions = []
517 523 if not perm_updates:
518 524 perm_updates = []
519 525 if not perm_deletions:
520 526 perm_deletions = []
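        # Illustrative input format (ids, names and the 'user_group' marker are
        # assumptions): each entry is a (member_id, permission_name, member_type)
        # tuple, with member_type telling users apart from user groups, e.g.
        #
        #   perm_additions = [('2', 'repository.write', 'user')]
        #   perm_updates   = [('5', 'repository.admin', 'user_group')]
        #   perm_deletions = [('2', 'repository.none', 'user')]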
521 527
522 528 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
523 529
524 530 changes = {
525 531 'added': [],
526 532 'updated': [],
527 533 'deleted': []
528 534 }
529 535 # update permissions
530 536 for member_id, perm, member_type in perm_updates:
531 537 member_id = int(member_id)
532 538 if member_type == 'user':
533 539 member_name = User.get(member_id).username
534 540 # this updates also current one if found
535 541 self.grant_user_permission(
536 542 repo=repo, user=member_id, perm=perm)
537 543 else: # set for user group
538 544 # check if we have permissions to alter this usergroup
539 545 member_name = UserGroup.get(member_id).users_group_name
540 546 if not check_perms or HasUserGroupPermissionAny(
541 547 *req_perms)(member_name, user=cur_user):
542 548 self.grant_user_group_permission(
543 549 repo=repo, group_name=member_id, perm=perm)
544 550
545 551 changes['updated'].append({'type': member_type, 'id': member_id,
546 552 'name': member_name, 'new_perm': perm})
547 553
548 554 # set new permissions
549 555 for member_id, perm, member_type in perm_additions:
550 556 member_id = int(member_id)
551 557 if member_type == 'user':
552 558 member_name = User.get(member_id).username
553 559 self.grant_user_permission(
554 560 repo=repo, user=member_id, perm=perm)
555 561 else: # set for user group
556 562 # check if we have permissions to alter this usergroup
557 563 member_name = UserGroup.get(member_id).users_group_name
558 564 if not check_perms or HasUserGroupPermissionAny(
559 565 *req_perms)(member_name, user=cur_user):
560 566 self.grant_user_group_permission(
561 567 repo=repo, group_name=member_id, perm=perm)
562 568 changes['added'].append({'type': member_type, 'id': member_id,
563 569 'name': member_name, 'new_perm': perm})
564 570 # delete permissions
565 571 for member_id, perm, member_type in perm_deletions:
566 572 member_id = int(member_id)
567 573 if member_type == 'user':
568 574 member_name = User.get(member_id).username
569 575 self.revoke_user_permission(repo=repo, user=member_id)
570 576 else: # set for user group
571 577 # check if we have permissions to alter this usergroup
572 578 member_name = UserGroup.get(member_id).users_group_name
573 579 if not check_perms or HasUserGroupPermissionAny(
574 580 *req_perms)(member_name, user=cur_user):
575 581 self.revoke_user_group_permission(
576 582 repo=repo, group_name=member_id)
577 583
578 584 changes['deleted'].append({'type': member_type, 'id': member_id,
579 585 'name': member_name, 'new_perm': perm})
580 586 return changes
581 587
582 588 def create_fork(self, form_data, cur_user):
583 589 """
584 590 Simple wrapper into executing celery task for fork creation
585 591
586 592 :param form_data:
587 593 :param cur_user:
588 594 """
589 595 from rhodecode.lib.celerylib import tasks, run_task
590 596 return run_task(tasks.create_repo_fork, form_data, cur_user)
591 597
592 598 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
593 599 """
594 600         Delete given repository; the forks parameter defines what to do with
595 601         attached forks. Throws AttachedForksError if the deleted repo has
596 602         attached forks
597 603
598 604 :param repo:
599 605 :param forks: str 'delete' or 'detach'
600 606 :param fs_remove: remove(archive) repo from filesystem
601 607 """
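        # Illustrative usage (`repo` below is an assumed Repository instance):
        # detach the forks and keep them, or cascade-delete them with the parent.
        #
        #   RepoModel().delete(repo, forks='detach')
        #   RepoModel().delete(repo, forks='delete', fs_remove=False)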
602 608 if not cur_user:
603 609 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
604 610 repo = self._get_repo(repo)
605 611 if repo:
606 612 if forks == 'detach':
607 613 for r in repo.forks:
608 614 r.fork = None
609 615 self.sa.add(r)
610 616 elif forks == 'delete':
611 617 for r in repo.forks:
612 618 self.delete(r, forks='delete')
613 619 elif [f for f in repo.forks]:
614 620 raise AttachedForksError()
615 621
616 622 old_repo_dict = repo.get_dict()
617 623 events.trigger(events.RepoPreDeleteEvent(repo))
618 624 try:
619 625 self.sa.delete(repo)
620 626 if fs_remove:
621 627 self._delete_filesystem_repo(repo)
622 628 else:
623 629 log.debug('skipping removal from filesystem')
624 630 old_repo_dict.update({
625 631 'deleted_by': cur_user,
626 632 'deleted_on': time.time(),
627 633 })
628 634 log_delete_repository(**old_repo_dict)
629 635 events.trigger(events.RepoDeleteEvent(repo))
630 636 except Exception:
631 637 log.error(traceback.format_exc())
632 638 raise
633 639
634 640 def grant_user_permission(self, repo, user, perm):
635 641 """
636 642 Grant permission for user on given repository, or update existing one
637 643 if found
638 644
639 645 :param repo: Instance of Repository, repository_id, or repository name
640 646 :param user: Instance of User, user_id or username
641 647 :param perm: Instance of Permission, or permission_name
642 648 """
643 649 user = self._get_user(user)
644 650 repo = self._get_repo(repo)
645 651 permission = self._get_perm(perm)
646 652
647 653 # check if we have that permission already
648 654 obj = self.sa.query(UserRepoToPerm) \
649 655 .filter(UserRepoToPerm.user == user) \
650 656 .filter(UserRepoToPerm.repository == repo) \
651 657 .scalar()
652 658 if obj is None:
653 659 # create new !
654 660 obj = UserRepoToPerm()
655 661 obj.repository = repo
656 662 obj.user = user
657 663 obj.permission = permission
658 664 self.sa.add(obj)
659 665 log.debug('Granted perm %s to %s on %s', perm, user, repo)
660 666 action_logger_generic(
661 667 'granted permission: {} to user: {} on repo: {}'.format(
662 668 perm, user, repo), namespace='security.repo')
663 669 return obj
664 670
665 671 def revoke_user_permission(self, repo, user):
666 672 """
667 673 Revoke permission for user on given repository
668 674
669 675 :param repo: Instance of Repository, repository_id, or repository name
670 676 :param user: Instance of User, user_id or username
671 677 """
672 678
673 679 user = self._get_user(user)
674 680 repo = self._get_repo(repo)
675 681
676 682 obj = self.sa.query(UserRepoToPerm) \
677 683 .filter(UserRepoToPerm.repository == repo) \
678 684 .filter(UserRepoToPerm.user == user) \
679 685 .scalar()
680 686 if obj:
681 687 self.sa.delete(obj)
682 688 log.debug('Revoked perm on %s on %s', repo, user)
683 689 action_logger_generic(
684 690 'revoked permission from user: {} on repo: {}'.format(
685 691 user, repo), namespace='security.repo')
686 692
687 693 def grant_user_group_permission(self, repo, group_name, perm):
688 694 """
689 695 Grant permission for user group on given repository, or update
690 696 existing one if found
691 697
692 698 :param repo: Instance of Repository, repository_id, or repository name
693 699 :param group_name: Instance of UserGroup, users_group_id,
694 700 or user group name
695 701 :param perm: Instance of Permission, or permission_name
696 702 """
697 703 repo = self._get_repo(repo)
698 704 group_name = self._get_user_group(group_name)
699 705 permission = self._get_perm(perm)
700 706
701 707 # check if we have that permission already
702 708 obj = self.sa.query(UserGroupRepoToPerm) \
703 709 .filter(UserGroupRepoToPerm.users_group == group_name) \
704 710 .filter(UserGroupRepoToPerm.repository == repo) \
705 711 .scalar()
706 712
707 713 if obj is None:
708 714 # create new
709 715 obj = UserGroupRepoToPerm()
710 716
711 717 obj.repository = repo
712 718 obj.users_group = group_name
713 719 obj.permission = permission
714 720 self.sa.add(obj)
715 721 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
716 722 action_logger_generic(
717 723 'granted permission: {} to usergroup: {} on repo: {}'.format(
718 724 perm, group_name, repo), namespace='security.repo')
719 725
720 726 return obj
721 727
722 728 def revoke_user_group_permission(self, repo, group_name):
723 729 """
724 730 Revoke permission for user group on given repository
725 731
726 732 :param repo: Instance of Repository, repository_id, or repository name
727 733 :param group_name: Instance of UserGroup, users_group_id,
728 734 or user group name
729 735 """
730 736 repo = self._get_repo(repo)
731 737 group_name = self._get_user_group(group_name)
732 738
733 739 obj = self.sa.query(UserGroupRepoToPerm) \
734 740 .filter(UserGroupRepoToPerm.repository == repo) \
735 741 .filter(UserGroupRepoToPerm.users_group == group_name) \
736 742 .scalar()
737 743 if obj:
738 744 self.sa.delete(obj)
739 745 log.debug('Revoked perm to %s on %s', repo, group_name)
740 746 action_logger_generic(
741 747 'revoked permission from usergroup: {} on repo: {}'.format(
742 748 group_name, repo), namespace='security.repo')
743 749
744 750 def delete_stats(self, repo_name):
745 751 """
746 752 removes stats for given repo
747 753
748 754 :param repo_name:
749 755 """
750 756 repo = self._get_repo(repo_name)
751 757 try:
752 758 obj = self.sa.query(Statistics) \
753 759 .filter(Statistics.repository == repo).scalar()
754 760 if obj:
755 761 self.sa.delete(obj)
756 762 except Exception:
757 763 log.error(traceback.format_exc())
758 764 raise
759 765
760 766 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
761 767 field_type='str', field_desc=''):
762 768
763 769 repo = self._get_repo(repo_name)
764 770
765 771 new_field = RepositoryField()
766 772 new_field.repository = repo
767 773 new_field.field_key = field_key
768 774 new_field.field_type = field_type # python type
769 775 new_field.field_value = field_value
770 776 new_field.field_desc = field_desc
771 777 new_field.field_label = field_label
772 778 self.sa.add(new_field)
773 779 return new_field
774 780
775 781 def delete_repo_field(self, repo_name, field_key):
776 782 repo = self._get_repo(repo_name)
777 783 field = RepositoryField.get_by_key_name(field_key, repo)
778 784 if field:
779 785 self.sa.delete(field)
780 786
781 787 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
782 788 clone_uri=None, repo_store_location=None,
783 789 use_global_config=False):
784 790 """
785 791         Makes a repository on the filesystem. It is group aware, meaning it will
786 792         create the repository within a group and alter the paths according to
787 793         the group location
788 794
789 795 :param repo_name:
790 796         :param repo_type:
791 797         :param repo_group:
792 798 :param clone_uri:
793 799 :param repo_store_location:
794 800 """
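        # Illustrative layout (paths and names are assumptions): with a repository
        # store at '/srv/repos', repo_group 'docs/internal' and repo_name 'manual',
        # the repository ends up on disk at '/srv/repos/docs/internal/manual'.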
795 801 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
796 802 from rhodecode.model.scm import ScmModel
797 803
798 804 if Repository.NAME_SEP in repo_name:
799 805 raise ValueError(
800 806                 'repo_name must not contain groups, got `%s`' % repo_name)
801 807
802 808 if isinstance(repo_group, RepoGroup):
803 809 new_parent_path = os.sep.join(repo_group.full_path_splitted)
804 810 else:
805 811 new_parent_path = repo_group or ''
806 812
807 813 if repo_store_location:
808 814 _paths = [repo_store_location]
809 815 else:
810 816 _paths = [self.repos_path, new_parent_path, repo_name]
811 817 # we need to make it str for mercurial
812 818 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
813 819
814 820 # check if this path is not a repository
815 821 if is_valid_repo(repo_path, self.repos_path):
816 822 raise Exception('This path %s is a valid repository' % repo_path)
817 823
818 824 # check if this path is a group
819 825 if is_valid_repo_group(repo_path, self.repos_path):
820 826 raise Exception('This path %s is a valid group' % repo_path)
821 827
822 828 log.info('creating repo %s in %s from url: `%s`',
823 829 repo_name, safe_unicode(repo_path),
824 830 obfuscate_url_pw(clone_uri))
825 831
826 832 backend = get_backend(repo_type)
827 833
828 834 config_repo = None if use_global_config else repo_name
829 835 if config_repo and new_parent_path:
830 836 config_repo = Repository.NAME_SEP.join(
831 837 (new_parent_path, config_repo))
832 838 config = make_db_config(clear_session=False, repo=config_repo)
833 839 config.set('extensions', 'largefiles', '')
834 840
835 841 # patch and reset hooks section of UI config to not run any
836 842 # hooks on creating remote repo
837 843 config.clear_section('hooks')
838 844
839 845 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
840 846 if repo_type == 'git':
841 847 repo = backend(
842 848 repo_path, config=config, create=True, src_url=clone_uri,
843 849 bare=True)
844 850 else:
845 851 repo = backend(
846 852 repo_path, config=config, create=True, src_url=clone_uri)
847 853
848 854 ScmModel().install_hooks(repo, repo_type=repo_type)
849 855
850 856 log.debug('Created repo %s with %s backend',
851 857 safe_unicode(repo_name), safe_unicode(repo_type))
852 858 return repo
853 859
854 860 def _rename_filesystem_repo(self, old, new):
855 861 """
856 862 renames repository on filesystem
857 863
858 864 :param old: old name
859 865 :param new: new name
860 866 """
861 867 log.info('renaming repo from %s to %s', old, new)
862 868
863 869 old_path = os.path.join(self.repos_path, old)
864 870 new_path = os.path.join(self.repos_path, new)
865 871 if os.path.isdir(new_path):
866 872 raise Exception(
867 873 'Was trying to rename to already existing dir %s' % new_path
868 874 )
869 875 shutil.move(old_path, new_path)
870 876
871 877 def _delete_filesystem_repo(self, repo):
872 878 """
873 879         Removes the repo from the filesystem. The removal is actually made by
874 880         adding a rm__ prefix to the dir and renaming the internal .hg/.git dirs,
875 881         so the repository is no longer valid for rhodecode and can be undeleted
876 882         later on by reverting the renames on this repository
877 883
878 884 :param repo: repo object
879 885 """
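        # Illustrative outcome (timestamp and name are assumptions): a repo named
        # 'docs' removed on 2017-06-01 12:00:00 is moved to something like
        #
        #   <repos_path>/rm__20170601_120000_000000__docs
        #
        # and, for non-bare repos, its '.hg'/'.git' dir is first renamed to
        # 'rm__.hg'/'rm__.git' so it is no longer detected as a vcs repository.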
880 886 rm_path = os.path.join(self.repos_path, repo.repo_name)
881 887 repo_group = repo.group
882 888 log.info("Removing repository %s", rm_path)
883 889         # disable hg/git internals so that it doesn't get detected as a repo
884 890 alias = repo.repo_type
885 891
886 892 config = make_db_config(clear_session=False)
887 893 config.set('extensions', 'largefiles', '')
888 894 bare = getattr(repo.scm_instance(config=config), 'bare', False)
889 895
890 896 # skip this for bare git repos
891 897 if not bare:
892 898 # disable VCS repo
893 899 vcs_path = os.path.join(rm_path, '.%s' % alias)
894 900 if os.path.exists(vcs_path):
895 901 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
896 902
897 903 _now = datetime.now()
898 904 _ms = str(_now.microsecond).rjust(6, '0')
899 905 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
900 906 repo.just_name)
901 907 if repo_group:
902 908 # if repository is in group, prefix the removal path with the group
903 909 args = repo_group.full_path_splitted + [_d]
904 910 _d = os.path.join(*args)
905 911
906 912 if os.path.isdir(rm_path):
907 913 shutil.move(rm_path, os.path.join(self.repos_path, _d))
908 914
909 915
910 916 class ReadmeFinder:
911 917 """
912 918 Utility which knows how to find a readme for a specific commit.
913 919
914 920 The main idea is that this is a configurable algorithm. When creating an
915 921 instance you can define parameters, currently only the `default_renderer`.
916 922     Based on this configuration the method :meth:`search` behaves slightly
917 923     differently.
918 924 """
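    # Illustrative usage (`commit` is an assumed vcs commit object): prefer
    # markdown-flavoured readmes when the default renderer is markdown.
    #
    #   finder = ReadmeFinder(default_renderer='markdown')
    #   readme_node = finder.search(commit)               # e.g. node for 'README.md'
    #   readme_node = finder.search(commit, path='docs')  # start under an assumed 'docs' dir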
919 925
920 926 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
921 927 path_re = re.compile(r'^docs?', re.IGNORECASE)
922 928
923 929 default_priorities = {
924 930 None: 0,
925 931 '.text': 2,
926 932 '.txt': 3,
927 933 '.rst': 1,
928 934 '.rest': 2,
929 935 '.md': 1,
930 936 '.mkdn': 2,
931 937 '.mdown': 3,
932 938 '.markdown': 4,
933 939 }
934 940
935 941 path_priority = {
936 942 'doc': 0,
937 943 'docs': 1,
938 944 }
939 945
940 946 FALLBACK_PRIORITY = 99
941 947
942 948 RENDERER_TO_EXTENSION = {
943 949 'rst': ['.rst', '.rest'],
944 950         'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
945 951 }
946 952
947 953 def __init__(self, default_renderer=None):
948 954 self._default_renderer = default_renderer
949 955 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
950 956 default_renderer, [])
951 957
952 958 def search(self, commit, path='/'):
953 959 """
954 960 Find a readme in the given `commit`.
955 961 """
956 962 nodes = commit.get_nodes(path)
957 963 matches = self._match_readmes(nodes)
958 964 matches = self._sort_according_to_priority(matches)
959 965 if matches:
960 966 return matches[0].node
961 967
962 968 paths = self._match_paths(nodes)
963 969 paths = self._sort_paths_according_to_priority(paths)
964 970 for path in paths:
965 971 match = self.search(commit, path=path)
966 972 if match:
967 973 return match
968 974
969 975 return None
970 976
971 977 def _match_readmes(self, nodes):
972 978 for node in nodes:
973 979 if not node.is_file():
974 980 continue
975 981 path = node.path.rsplit('/', 1)[-1]
976 982 match = self.readme_re.match(path)
977 983 if match:
978 984 extension = match.group(1)
979 985 yield ReadmeMatch(node, match, self._priority(extension))
980 986
981 987 def _match_paths(self, nodes):
982 988 for node in nodes:
983 989 if not node.is_dir():
984 990 continue
985 991 match = self.path_re.match(node.path)
986 992 if match:
987 993 yield node.path
988 994
989 995 def _priority(self, extension):
990 996 renderer_priority = (
991 997 0 if extension in self._renderer_extensions else 1)
992 998 extension_priority = self.default_priorities.get(
993 999 extension, self.FALLBACK_PRIORITY)
994 1000 return (renderer_priority, extension_priority)
995 1001
996 1002 def _sort_according_to_priority(self, matches):
997 1003
998 1004 def priority_and_path(match):
999 1005 return (match.priority, match.path)
1000 1006
1001 1007 return sorted(matches, key=priority_and_path)
1002 1008
1003 1009 def _sort_paths_according_to_priority(self, paths):
1004 1010
1005 1011 def priority_and_path(path):
1006 1012 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1007 1013
1008 1014 return sorted(paths, key=priority_and_path)
1009 1015
1010 1016
1011 1017 class ReadmeMatch:
1012 1018
1013 1019 def __init__(self, node, match, priority):
1014 1020 self.node = node
1015 1021 self._match = match
1016 1022 self.priority = priority
1017 1023
1018 1024 @property
1019 1025 def path(self):
1020 1026 return self.node.path
1021 1027
1022 1028 def __repr__(self):
1023 1029         return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)