##// END OF EJS Templates
events: fix bugs with serialization of repo/pr events and add tests for those cases
dan -
r389:06163eeb default
parent child Browse files
Show More
@@ -1,387 +1,388 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25
26 26 from paste.registry import RegistryManager
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pylons.wsgiapp import PylonsApp
29 29 from pyramid.authorization import ACLAuthorizationPolicy
30 30 from pyramid.config import Configurator
31 31 from pyramid.static import static_view
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
35 35 import pyramid.httpexceptions as httpexceptions
36 36 from pyramid.renderers import render_to_response, render
37 37 from routes.middleware import RoutesMiddleware
38 38 import routes.util
39 39
40 40 import rhodecode
41 41 from rhodecode.config import patches
42 42 from rhodecode.config.environment import (
43 43 load_environment, load_pyramid_environment)
44 44 from rhodecode.lib.middleware import csrf
45 45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.middleware.vcs import VCSMiddleware
49 49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 50
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
    """Create and return the Pylons WSGI application.

    ``global_conf``
        The inherited configuration for this application, normally from
        the [DEFAULT] section of the Paste ini file.

    ``full_stack``
        When true the application provides a full WSGI stack (it handles
        its own exceptions and errors). Disable it when this application
        is "managed" by another WSGI middleware.

    ``app_conf``
        The application's local configuration, normally from the
        [app:<name>] section of the Paste ini file (<name> defaults to
        main).
    """
    # Compatibility patches must run before the rest of the stack is built.
    patches.kombu_1_5_1_python_2_7_11()
    patches.inspect_getargspec()

    # Configure the Pylons environment and build the core WSGI app.
    config = load_environment(global_conf, app_conf)
    app = PylonsApp(config=config)

    # CSRF detection is only wanted while running the test suite.
    if rhodecode.is_test:
        app = csrf.CSRFDetector(app)

    expected_origin = config.get('expected_origin')
    if expected_origin:
        # The API can be accessed from other Origins.
        app = csrf.OriginChecker(
            app, expected_origin, skip_urls=[routes.util.url_for('api')])

    if asbool(full_stack):
        # Appenlight monitoring and error handler
        app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)

        # We want our low level middleware to get to the request ASAP;
        # it does not need any pylons stack middleware around it.
        app = VCSMiddleware(app, config, appenlight_client)

    # Establish the Registry for this application
    app = RegistryManager(app)

    # Expose the config on the returned app for code that introspects it.
    app.config = config
    return app
108 108
109 109
def make_pyramid_app(global_config, **settings):
    """
    Constructs the WSGI application based on Pyramid and wraps the Pylons
    based application.

    Specials:

    * We migrate from Pylons to Pyramid. While doing this, we keep both
      frameworks functional. This involves moving some WSGI middlewares
      around and providing access to some data internals, so that the old
      code is still functional.

    * The application can also be integrated like a plugin via the call to
      `includeme`. This is accompanied by the other utility functions which
      are called. Changing this should be done with great care to not break
      cases when these fragments are assembled from another place.
    """
    # The edition string should be available in pylons too, so add it here
    # before copying the settings.
    settings.setdefault('rhodecode.edition', 'Community Edition')

    # As long as our Pylons application expects "unprepared" settings, keep
    # an unmodified copy. This avoids unintentional behavior changes in the
    # old application.
    raw_settings = settings.copy()

    sanitize_settings_and_apply_defaults(settings)

    config = Configurator(settings=settings)
    add_pylons_compat_data(config.registry, global_config, raw_settings)
    load_pyramid_environment(global_config, settings)

    includeme(config)
    includeme_last(config)

    app = config.make_wsgi_app()
    return wrap_app_in_wsgi_middlewares(app, config)
148 148
149 149
def add_pylons_compat_data(registry, global_config, settings):
    """
    Attach data to the registry to support the Pylons integration.
    """
    # Stored under private attributes; consumed later by `includeme` and
    # `wrap_app_in_wsgi_middlewares` when the Pylons app is assembled.
    registry._pylons_compat_global_config = global_config
    registry._pylons_compat_settings = settings
156 156
157 157
def webob_to_pyramid_http_response(webob_response):
    """Translate a webob response into the matching pyramid HTTP exception.

    Looks up the pyramid exception class for the webob status code, copies
    the status and headers over, and forces an explicit UTF-8 charset on
    HTML responses so browsers render them correctly.
    """
    ResponseClass = httpexceptions.status_map[webob_response.status_int]
    pyramid_response = ResponseClass(webob_response.status)
    pyramid_response.status = webob_response.status
    pyramid_response.headers.update(webob_response.headers)
    # Fix: use .get() — responses without a content-type header would
    # otherwise raise a KeyError here.
    if pyramid_response.headers.get('content-type') == 'text/html':
        pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
    return pyramid_response
166 166
167 167
def error_handler(exception, request):
    """Render the error document for exceptions bubbling out of the app.

    TODO: dan: replace the old pylons error controller with this.
    """
    from rhodecode.model.settings import SettingsModel
    from rhodecode.lib.utils2 import AttributeDict

    try:
        rc_config = SettingsModel().get_all_settings()
    except Exception:
        log.exception('failed to fetch settings')
        rc_config = {}

    # prefer original exception for the response since it may have headers set
    if isinstance(exception, HTTPError):
        base_response = exception
    else:
        base_response = HTTPInternalServerError()

    c = AttributeDict()
    c.error_message = base_response.status
    c.error_explanation = base_response.explanation or str(base_response)
    c.visual = AttributeDict()
    c.visual.rhodecode_support_url = (
        request.registry.settings.get('rhodecode_support_url') or
        request.route_url('rhodecode_support'))
    c.redirect_time = 0
    # Fall back to the product name when no custom title is configured.
    c.rhodecode_name = rc_config.get('rhodecode_title', '') or 'Rhodecode'

    return render_to_response(
        '/errors/error_document.html', {'c': c}, request=request,
        response=base_response)
203 203
204 204
def includeme(config):
    """Configure the pyramid application and mount the Pylons app inside it."""
    settings = config.registry.settings

    if asbool(settings.get('appenlight', 'false')):
        config.include('appenlight_client.ext.pyramid_tween')

    # Includes which are required. The application would fail without them.
    config.include('pyramid_mako')
    config.include('pyramid_beaker')
    config.include('rhodecode.admin')
    config.include('rhodecode.integrations')
    config.include('rhodecode.authentication')
    config.include('rhodecode.login')
    config.include('rhodecode.tweens')
    config.include('rhodecode.api')
    config.add_route(
        'rhodecode_support', 'https://rhodecode.com/help/', static=True)

    # Set the authorization policy.
    config.set_authorization_policy(ACLAuthorizationPolicy())

    # Set the default renderer for HTML templates to mako.
    config.add_mako_renderer('.html')

    # plugin information
    config.registry.rhodecode_plugins = {}
    config.add_directive(
        'register_rhodecode_plugin', register_rhodecode_plugin)

    # include RhodeCode plugins
    for include in aslist(settings.get('rhodecode.includes', [])):
        config.include(include)

    pylons_app = make_app(
        config.registry._pylons_compat_global_config,
        **config.registry._pylons_compat_settings)
    config.registry._pylons_compat_config = pylons_app.config
    pylons_view = wsgiapp(pylons_app)

    # Protect from VCS Server error related pages when server is not available
    if not asbool(settings.get('vcs.server.enable', 'true')):
        pylons_view = DisableVCSPagesWrapper(pylons_view)

    def pylons_app_with_error_handler(context, request):
        """
        Handle exceptions from rc pylons app:

        - old webob type exceptions get converted to pyramid exceptions
        - pyramid exceptions are passed to the error handler view
        """
        try:
            response = pylons_view(context, request)
            if 400 <= response.status_int <= 599:  # webob type error responses
                return error_handler(
                    webob_to_pyramid_http_response(response), request)
        except HTTPError as e:  # pyramid type exceptions
            return error_handler(e, request)
        except Exception:
            if settings.get('debugtoolbar.enabled', False):
                raise
            return error_handler(HTTPInternalServerError(), request)
        return response

    # This is the glue which allows us to migrate in chunks. By registering
    # the pylons based application as the "Not Found" view in Pyramid, we
    # fall back to the old application whenever the new one does not yet
    # know how to handle a request.
    config.add_notfound_view(pylons_app_with_error_handler)

    if settings.get('debugtoolbar.enabled', False):
        # if toolbar, then only http type exceptions get caught and rendered
        ExcClass = HTTPError
    else:
        # if no toolbar, then any exception gets caught and rendered
        ExcClass = Exception
    config.add_view(error_handler, context=ExcClass)
285 286
286 287
def includeme_last(config):
    """
    The static file catchall needs to be last in the view configuration.
    """
    settings = config.registry.settings

    # Note: johbo: I would prefer to register a prefix for static files at
    # some point, e.g. move them under '_static/'. That would fully avoid
    # name clashes with a repository name (imagine someone calling his repo
    # "css"). Having an external web server serve out the static files also
    # seems easier to set up with a common prefix.
    #
    # Example: config.add_static_view('_static', path='rhodecode:public')
    #
    # It might be an option to register both paths for a while and then
    # migrate over to the new location.

    # Serving static files with a catchall.
    if not settings['static_files']:
        return
    config.add_route('catchall_static', '/*subpath')
    config.add_view(
        static_view('rhodecode:public'), route_name='catchall_static')
309 310
310 311
def wrap_app_in_wsgi_middlewares(pyramid_app, config):
    """
    Apply outer WSGI middlewares around the application.

    Part of this has been moved up from the Pylons layer, so that the
    data is also available if old Pylons code is hit through an already
    ported view.
    """
    settings = config.registry.settings
    app = pyramid_app

    # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
    app = HttpsFixup(app, settings)

    # RoutesMiddleware supports the pylons compatibility tween during the
    # migration to pyramid.
    routes_map = config.registry._pylons_compat_config['routes.map']
    app = RoutesMiddleware(app, routes_map)

    if asbool(settings.get('appenlight', 'false')):
        app, _ = wrap_in_appenlight_if_enabled(
            app, config.registry._pylons_compat_config)

    # TODO: johbo: Don't really see why we enable the gzip middleware when
    # serving static files, might be something that should have its own
    # setting as well?
    if settings['static_files']:
        app = make_gzip_middleware(app, settings, compress_level=1)

    return app
342 343
343 344
def sanitize_settings_and_apply_defaults(settings):
    """
    Applies settings defaults and does all type conversion.

    All settings parsing and preparation should eventually live here, so
    there is a single place dealing with it and the rest of the application
    can rely on fully prepared settings. This will later be split up per
    topic to avoid one big monster function.
    """
    # Pyramid's mako renderer has to search in the templates folder so the
    # old (Pylons) templates still work. Ported and new templates are
    # expected to use real asset specifications for their includes.
    mako_directories = settings.setdefault('mako.directories', [
        # Base templates of the original Pylons application
        'rhodecode:templates',
    ])
    log.debug(
        "Using the following Mako template directories: %s",
        mako_directories)

    # Default includes, possible to change as a user
    pyramid_includes = settings.setdefault('pyramid.includes', [
        'rhodecode.lib.middleware.request_wrapper',
    ])
    log.debug(
        "Using the following pyramid.includes: %s", pyramid_includes)

    # TODO: johbo: Re-think this, usually the call to config.include
    # should allow to pass in a prefix.
    settings.setdefault('rhodecode.api.url', '/_admin/api')

    # Coerce the boolean-valued settings.
    for name, default in (
            ('vcs.server.enable', 'true'),
            ('static_files', 'true'),
            ('is_test', 'false')):
        _bool_setting(settings, name, default)

    return settings
384 385
385 386
def _bool_setting(settings, name, default):
    """Coerce ``settings[name]`` to a bool, using ``default`` when absent."""
    raw_value = settings.get(name, default)
    settings[name] = asbool(raw_value)
@@ -1,69 +1,70 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from datetime import datetime
20 20 from marshmallow import Schema, fields
21 21 from pyramid.threadlocal import get_current_request
22 22 from rhodecode.lib.utils2 import AttributeDict
23 23
24 24
# Placeholder actor reported for events that happen outside of a web
# request (e.g. maintenance scripts), where no authenticated user exists.
SYSTEM_USER = AttributeDict(dict(
    username='__SYSTEM__'
))
28 28
29 29
class UserSchema(Schema):
    """
    Marshmallow schema serializing a user.
    """
    # Only the username is exposed; other user attributes stay private.
    username = fields.Str()
35 35
36 36
class RhodecodeEventSchema(Schema):
    """
    Marshmallow schema shared by all rhodecode events.
    """
    utc_timestamp = fields.DateTime()
    actor = fields.Nested(UserSchema)
    actor_ip = fields.Str()
    # Fix: `attribute='name'` was redundant — marshmallow already reads the
    # attribute matching the field name.
    name = fields.Str()
44 45
45 46
class RhodecodeEvent(object):
    """
    Base event class for all Rhodecode events.

    Captures the current request and a UTC timestamp at creation time and
    serializes itself through its marshmallow schema.
    """
    MarshmallowSchema = RhodecodeEventSchema

    def __init__(self):
        self.request = get_current_request()
        self.utc_timestamp = datetime.utcnow()

    @property
    def actor(self):
        # The authenticated user when inside a request, SYSTEM_USER otherwise.
        request = self.request
        if request:
            return request.user.get_instance()
        return SYSTEM_USER

    @property
    def actor_ip(self):
        # The acting user's IP when inside a request, a placeholder otherwise.
        request = self.request
        if request:
            return request.user.ip_addr
        return '<no ip available>'

    def as_dict(self):
        """Serialize this event to a plain dict via its schema."""
        schema = self.MarshmallowSchema()
        return schema.dump(self).data
@@ -1,149 +1,149 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from marshmallow import Schema, fields
20 20
21 from rhodecode.model.db import Repository, Session
21 from rhodecode.model.db import User, Repository, Session
22 22 from rhodecode.events.base import RhodecodeEvent
23 23
24 24
def get_repo_url(repo):
    """Return the full URL of ``repo`` (local import avoids a cycle)."""
    from rhodecode.model.repo import RepoModel
    model = RepoModel()
    return model.get_url(repo)
28 28
29 29
class RepositorySchema(Schema):
    """
    Marshmallow schema serializing a repository.
    """
    repo_id = fields.Integer()
    repo_name = fields.Str()
    # The URL is computed, not stored on the model.
    url = fields.Function(get_repo_url)
37 37
38 38
class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
    """
    Marshmallow schema for events that carry a repository.
    """
    repo = fields.Nested(RepositorySchema)
44 44
45 45
class RepoEvent(RhodecodeEvent):
    """
    Base class for events acting on a repository.

    :param repo: a :class:`Repository` instance
    """
    MarshmallowSchema = RepoEventSchema

    def __init__(self, repo):
        super(RepoEvent, self).__init__()
        self.repo = repo
57 57
58 58
class RepoPreCreateEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` right before
    a repo is created.
    """
    name = 'repo-pre-create'
65 65
66 66
class RepoCreatedEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` after a repo
    has been created.
    """
    name = 'repo-created'
73 73
74 74
class RepoPreDeleteEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` before a repo
    is deleted.
    """
    # Fix: the docstring previously said "created" (copy-paste error).
    name = 'repo-pre-delete'
81 81
82 82
class RepoDeletedEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` whenever a repo
    is deleted.
    """
    # Fix: the docstring previously said "created" (copy-paste error).
    name = 'repo-deleted'
89 89
90 90
class RepoVCSEvent(RepoEvent):
    """
    Base class for events triggered by the VCS layer (hooks).

    :param repo_name: name of an existing repository
    :param extras: dict of data proxied from the VCS action; may carry the
        acting user's ``username`` and ``ip``
    :raises Exception: when no repository with ``repo_name`` exists
    """
    def __init__(self, repo_name, extras):
        self.repo = Repository.get_by_repo_name(repo_name)
        if not self.repo:
            raise Exception('repo by this name %s does not exist' % repo_name)
        self.extras = extras
        super(RepoVCSEvent, self).__init__(self.repo)

    @property
    def actor(self):
        if self.extras.get('username'):
            return User.get_by_username(self.extras['username'])
        # Fix: previously fell through and returned None when no username
        # was passed; delegate to the base property (request user or
        # SYSTEM_USER) so serialization always has an actor.
        return super(RepoVCSEvent, self).actor

    @property
    def actor_ip(self):
        if self.extras.get('ip'):
            return self.extras['ip']
        # Fix: same fall-through-to-None problem as `actor` above.
        return super(RepoVCSEvent, self).actor_ip
111 111
112 112
class RepoPrePullEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` right before
    commits are pulled from a repo.
    """
    name = 'repo-pre-pull'
119 119
120 120
class RepoPullEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` once commits
    have been pulled from a repo.
    """
    name = 'repo-pull'
127 127
128 128
class RepoPrePushEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` right before
    commits are pushed to a repo.
    """
    name = 'repo-pre-push'
135 135
136 136
class RepoPushEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` once commits
    have been pushed to a repo.

    :param repo_name: name of the target repository
    :param pushed_commit_ids: ids of the commits that were pushed
    :param extras: (optional) dict of data from proxied VCS actions
    """
    name = 'repo-push'

    def __init__(self, repo_name, pushed_commit_ids, extras):
        super(RepoPushEvent, self).__init__(repo_name, extras)
        self.pushed_commit_ids = pushed_commit_ids
149 149
@@ -1,1153 +1,1153 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30
31 31 from pylons.i18n.translation import _
32 32 from pylons.i18n.translation import lazy_ugettext
33 33
34 34 import rhodecode
35 35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 38 from rhodecode.lib.markup_renderer import (
39 39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 40 from rhodecode.lib.utils import action_logger
41 41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 42 from rhodecode.lib.vcs.backends.base import (
43 43 Reference, MergeResponse, MergeFailureReason)
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, EmptyRepositoryError)
46 46 from rhodecode.model import BaseModel
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import ChangesetCommentsModel
49 49 from rhodecode.model.db import (
50 50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 51 PullRequestVersion, ChangesetComment)
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.notification import NotificationModel, \
54 54 EmailNotificationModel
55 55 from rhodecode.model.scm import ScmModel
56 56 from rhodecode.model.settings import VcsSettingsModel
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class PullRequestModel(BaseModel):
63 63
64 64 cls = PullRequest
65 65
66 66 DIFF_CONTEXT = 3
67 67
68 68 MERGE_STATUS_MESSAGES = {
69 69 MergeFailureReason.NONE: lazy_ugettext(
70 70 'This pull request can be automatically merged.'),
71 71 MergeFailureReason.UNKNOWN: lazy_ugettext(
72 72 'This pull request cannot be merged because of an unhandled'
73 73 ' exception.'),
74 74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
75 75 'This pull request cannot be merged because of conflicts.'),
76 76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
77 77 'This pull request could not be merged because push to target'
78 78 ' failed.'),
79 79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
80 80 'This pull request cannot be merged because the target is not a'
81 81 ' head.'),
82 82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
83 83 'This pull request cannot be merged because the source contains'
84 84 ' more branches than the target.'),
85 85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
86 86 'This pull request cannot be merged because the target has'
87 87 ' multiple heads.'),
88 88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
89 89 'This pull request cannot be merged because the target repository'
90 90 ' is locked.'),
91 91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
92 92 'This pull request cannot be merged because the target or the '
93 93 'source reference is missing.'),
94 94 }
95 95
96 96 def __get_pull_request(self, pull_request):
97 97 return self._get_instance(PullRequest, pull_request)
98 98
99 99 def _check_perms(self, perms, pull_request, user, api=False):
100 100 if not api:
101 101 return h.HasRepoPermissionAny(*perms)(
102 102 user=user, repo_name=pull_request.target_repo.repo_name)
103 103 else:
104 104 return h.HasRepoPermissionAnyApi(*perms)(
105 105 user=user, repo_name=pull_request.target_repo.repo_name)
106 106
107 107 def check_user_read(self, pull_request, user, api=False):
108 108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 109 return self._check_perms(_perms, pull_request, user, api)
110 110
111 111 def check_user_merge(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_update(self, pull_request, user, api=False):
116 116 owner = user.user_id == pull_request.user_id
117 117 return self.check_user_merge(pull_request, user, api) or owner
118 118
119 119 def check_user_change_status(self, pull_request, user, api=False):
120 120 reviewer = user.user_id in [x.user_id for x in
121 121 pull_request.reviewers]
122 122 return self.check_user_update(pull_request, user, api) or reviewer
123 123
124 124 def get(self, pull_request):
125 125 return self.__get_pull_request(pull_request)
126 126
127 127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 128 opened_by=None, order_by=None,
129 129 order_dir='desc'):
130 130 repo = self._get_repo(repo_name)
131 131 q = PullRequest.query()
132 132 # source or target
133 133 if source:
134 134 q = q.filter(PullRequest.source_repo == repo)
135 135 else:
136 136 q = q.filter(PullRequest.target_repo == repo)
137 137
138 138 # closed,opened
139 139 if statuses:
140 140 q = q.filter(PullRequest.status.in_(statuses))
141 141
142 142 # opened by filter
143 143 if opened_by:
144 144 q = q.filter(PullRequest.user_id.in_(opened_by))
145 145
146 146 if order_by:
147 147 order_map = {
148 148 'name_raw': PullRequest.pull_request_id,
149 149 'title': PullRequest.title,
150 150 'updated_on_raw': PullRequest.updated_on
151 151 }
152 152 if order_dir == 'asc':
153 153 q = q.order_by(order_map[order_by].asc())
154 154 else:
155 155 q = q.order_by(order_map[order_by].desc())
156 156
157 157 return q
158 158
159 159 def count_all(self, repo_name, source=False, statuses=None,
160 160 opened_by=None):
161 161 """
162 162 Count the number of pull requests for a specific repository.
163 163
164 164 :param repo_name: target or source repo
165 165 :param source: boolean flag to specify if repo_name refers to source
166 166 :param statuses: list of pull request statuses
167 167 :param opened_by: author user of the pull request
168 168 :returns: int number of pull requests
169 169 """
170 170 q = self._prepare_get_all_query(
171 171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172 172
173 173 return q.count()
174 174
175 175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 176 offset=0, length=None, order_by=None, order_dir='desc'):
177 177 """
178 178 Get all pull requests for a specific repository.
179 179
180 180 :param repo_name: target or source repo
181 181 :param source: boolean flag to specify if repo_name refers to source
182 182 :param statuses: list of pull request statuses
183 183 :param opened_by: author user of the pull request
184 184 :param offset: pagination offset
185 185 :param length: length of returned list
186 186 :param order_by: order of the returned list
187 187 :param order_dir: 'asc' or 'desc' ordering direction
188 188 :returns: list of pull requests
189 189 """
190 190 q = self._prepare_get_all_query(
191 191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 192 order_by=order_by, order_dir=order_dir)
193 193
194 194 if length:
195 195 pull_requests = q.limit(length).offset(offset).all()
196 196 else:
197 197 pull_requests = q.all()
198 198
199 199 return pull_requests
200 200
    def count_awaiting_review(self, repo_name, source=False, statuses=None,
                              opened_by=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        # NOTE: the awaiting-review filter depends on the calculated review
        # status, which cannot be expressed in SQL, so this materializes the
        # full list and counts in Python.
        pull_requests = self.get_awaiting_review(
            repo_name, source=source, statuses=statuses, opened_by=opened_by)

        return len(pull_requests)
217 217
    def get_awaiting_review(self, repo_name, source=False, statuses=None,
                            opened_by=None, offset=0, length=None,
                            order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        pull_requests = self.get_all(
            repo_name, source=source, statuses=statuses, opened_by=opened_by,
            order_by=order_by, order_dir=order_dir)

        # the review status is computed from the changeset statuses, so the
        # filter has to run in Python after the query; pagination is applied
        # to the filtered list
        _filtered_pull_requests = []
        for pr in pull_requests:
            status = pr.calculated_review_status()
            if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
                          ChangesetStatus.STATUS_UNDER_REVIEW]:
                _filtered_pull_requests.append(pr)
        if length:
            return _filtered_pull_requests[offset:offset+length]
        else:
            return _filtered_pull_requests
249 249
    def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
                                 opened_by=None, user_id=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review from a specific user.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param user_id: reviewer user of the pull request
        :returns: int number of pull requests
        """
        # counts by materializing the filtered list; the per-user reviewer
        # filter happens in Python, not in SQL
        pull_requests = self.get_awaiting_my_review(
            repo_name, source=source, statuses=statuses, opened_by=opened_by,
            user_id=user_id)

        return len(pull_requests)
268 268
269 269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 270 opened_by=None, user_id=None, offset=0,
271 271 length=None, order_by=None, order_dir='desc'):
272 272 """
273 273 Get all pull requests for a specific repository that are awaiting
274 274 review from a specific user.
275 275
276 276 :param repo_name: target or source repo
277 277 :param source: boolean flag to specify if repo_name refers to source
278 278 :param statuses: list of pull request statuses
279 279 :param opened_by: author user of the pull request
280 280 :param user_id: reviewer user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _my = PullRequestModel().get_not_reviewed(user_id)
292 292 my_participation = []
293 293 for pr in pull_requests:
294 294 if pr in _my:
295 295 my_participation.append(pr)
296 296 _filtered_pull_requests = my_participation
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
    def get_not_reviewed(self, user_id):
        """
        Return all pull requests which have a reviewer entry for the given
        user.

        NOTE(review): despite the name, the query only filters on reviewer
        assignment - it does not check whether the user already cast a vote.
        """
        return [
            x.pull_request for x in PullRequestReviewers.query().filter(
                PullRequestReviewers.user_id == user_id).all()
        ]
307 307
    def get_versions(self, pull_request):
        """
        Return all versions of the given pull request, sorted by version id
        in ascending order (oldest version first).
        """
        # the previous docstring claimed "descending", but the query below
        # explicitly orders by `.asc()` - the docstring was wrong, not the code
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
316 316
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None):
        """
        Create a new pull request with its reviewer entries, mark all its
        commits "under review", notify the reviewers and fire the 'create'
        hook.

        :param created_by: author (user instance, id or username)
        :param source_repo: source repository (instance, name or id)
        :param source_ref: full source reference string
        :param target_repo: target repository (instance, name or id)
        :param target_ref: full target reference string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of reviewer user ids
        :param title: pull request title
        :param description: optional description text
        :returns: the newly created `PullRequest`
        """
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user

        Session().add(pull_request)
        # flush so the pull request receives its primary key, which the
        # reviewer rows and status records below reference
        Session().flush()

        # members / reviewers
        for user_id in set(reviewers):
            user = self._get_user(user_id)
            reviewer = PullRequestReviewers(user, pull_request)
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        self.notify_reviewers(pull_request, reviewers)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        return pull_request
356 356
    def _trigger_pull_request_hook(self, pull_request, user, action):
        """
        Dispatch the matching log/integration hook for a pull request
        life-cycle action.

        :param pull_request: pull request (instance or id)
        :param user: the acting user (provides the username for the hook)
        :param action: one of 'create', 'merge', 'close',
            'review_status_change' or 'update'; any other value is
            silently ignored
        """
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
        else:
            # unknown actions are a no-op by design
            return

        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_alias=target_scm.alias,
            pull_request=pull_request)
378 378
379 379 def _get_commit_ids(self, pull_request):
380 380 """
381 381 Return the commit ids of the merged pull request.
382 382
383 383 This method is not dealing correctly yet with the lack of autoupdates
384 384 nor with the implicit target updates.
385 385 For example: if a commit in the source repo is already in the target it
386 386 will be reported anyways.
387 387 """
388 388 merge_rev = pull_request.merge_rev
389 389 if merge_rev is None:
390 390 raise ValueError('This pull request was not merged yet')
391 391
392 392 commit_ids = list(pull_request.revisions)
393 393 if merge_rev not in commit_ids:
394 394 commit_ids.append(merge_rev)
395 395
396 396 return commit_ids
397 397
398 398 def merge(self, pull_request, user, extras):
399 399 log.debug("Merging pull request %s", pull_request.pull_request_id)
400 400 merge_state = self._merge_pull_request(pull_request, user, extras)
401 401 if merge_state.executed:
402 402 log.debug(
403 403 "Merge was successful, updating the pull request comments.")
404 404 self._comment_and_close_pr(pull_request, user, merge_state)
405 405 self._log_action('user_merged_pull_request', user, pull_request)
406 406 else:
407 407 log.warn("Merge failed, not updating the pull request.")
408 408 return merge_state
409 409
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Perform the actual vcs-level merge of a pull request and return the
        backend's merge response.

        The merge runs with a callback daemon active so the repository hooks
        triggered by the merge can reach back into the application.
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # refresh so branch/bookmark targets point at their current tip
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
        use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=protocol, use_direct_calls=use_direct_calls)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
443 443
    def _comment_and_close_pr(self, pull_request, user, merge_state):
        """
        After a successful merge: record the merge revision, add the
        "merged and closed" comment, invalidate the target repo caches and
        fire the 'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_commit_id
        pull_request.updated_on = datetime.datetime.now()

        # closing comment; `closing_pr=True` makes the comment close the PR
        ChangesetCommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
464 464
465 465 def has_valid_update_type(self, pull_request):
466 466 source_ref_type = pull_request.source_ref_parts.type
467 467 return source_ref_type in ['book', 'branch', 'tag']
468 468
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        Returns ``(None, None)`` when the source ref type cannot be updated
        or when nothing changed; otherwise a tuple of
        ``(pull_request_version, changes)``.
        """

        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        # only named refs (book/branch/tag) can move, so only those update
        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return (None, None)

        source_repo = pull_request.source_repo.scm_instance()
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
        if source_ref_id == source_commit.raw_id:
            log.debug("Nothing changed in pull request %s", pull_request)
            return (None, None)

        # Finally there is a need for an update
        # snapshot the current state first, so comments can stay attached
        # to the superseded version
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        # named target refs are resolved by name (they may have moved);
        # anything else is pinned to its stored commit id
        if target_ref_type in ('tag', 'branch', 'book'):
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)

        # re-compute commit ids
        old_commit_ids = set(pull_request.revisions)
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)
        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = set(pull_request.revisions)

        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # comments attached to hunks that changed become "outdated"
        ChangesetCommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # Add an automatic comment to the pull request
        update_comment = ChangesetCommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug('Updated pull request with the following file changes: %s',
                  file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(pull_request, pull_request.author,
                                        'update')
        return (pull_request_version, changes)
575 575
    def _create_version_from_snapshot(self, pull_request):
        """
        Persist a `PullRequestVersion` capturing the current state of the
        pull request (metadata, refs, cached merge state and revisions) and
        return it.
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.created_on = pull_request.created_on
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # copy the cached merge-state fields as well, so the snapshot is
        # complete even before the next merge dry-run
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version._last_merge_status = pull_request._last_merge_status
        version.merge_rev = pull_request.merge_rev

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush so the version gets its primary key before callers use it
        Session().flush()

        return version
600 600
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Generate prepared `DiffProcessor` objects for the old state
        (the snapshot version) and the new state (the updated pull request).

        :returns: ``(old_diff_data, new_diff_data)`` tuple
        """
        # extra context lines so comment-outdating can match hunks reliably
        diff_context = (
            self.DIFF_CONTEXT +
            ChangesetCommentsModel.needed_extra_diff_context())
        old_diff = self._get_diff_from_pr_or_version(
            pull_request_version, context=diff_context)
        new_diff = self._get_diff_from_pr_or_version(
            pull_request, context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
616 616
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query().filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            ChangesetComment.pull_request_version == None)

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
640 640
641 641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 642 added = new_ids.difference(old_ids)
643 643 common = old_ids.intersection(new_ids)
644 644 removed = old_ids.difference(new_ids)
645 645 return ChangeTuple(added, common, removed)
646 646
647 647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648 648
649 649 old_files = OrderedDict()
650 650 for diff_data in old_diff_data.parsed_diff:
651 651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652 652
653 653 added_files = []
654 654 modified_files = []
655 655 removed_files = []
656 656 for diff_data in new_diff_data.parsed_diff:
657 657 new_filename = diff_data['filename']
658 658 new_hash = md5_safe(diff_data['raw_diff'])
659 659
660 660 old_hash = old_files.get(new_filename)
661 661 if not old_hash:
662 662 # file is not present in old diff, means it's added
663 663 added_files.append(new_filename)
664 664 else:
665 665 if new_hash != old_hash:
666 666 modified_files.append(new_filename)
667 667 # now remove a file from old, since we have seen it already
668 668 del old_files[new_filename]
669 669
670 670 # removed files is when there are present in old, but not in NEW,
671 671 # since we remove old files that are present in new diff, left-overs
672 672 # if any should be the removed files
673 673 removed_files.extend(old_files.keys())
674 674
675 675 return FileChangeTuple(added_files, modified_files, removed_files)
676 676
    def _render_update_message(self, changes, file_changes):
        """
        render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it's always looking the same disregarding on which default
        renderer system is using.

        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        :returns: rendered text of the automatic "pull request updated"
            comment
        """
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
        }
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)
704 704
    def edit(self, pull_request, title, description):
        """
        Update the title and description of an open pull request.

        :param pull_request: pull request (instance or id)
        :param title: new title; falsy values keep the current title
        :param description: new description (always overwritten)
        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
714 714
    def update_reviewers(self, pull_request, reviewers_ids):
        """
        Sync the reviewer set of a pull request to exactly `reviewers_ids`:
        add missing reviewer rows, remove ones no longer listed, and notify
        the newly added reviewers.

        :param pull_request: pull request (instance or id)
        :param reviewers_ids: iterable of user ids that should be reviewers
        :returns: ``(ids_to_add, ids_to_remove)`` sets of user ids
        """
        reviewers_ids = set(reviewers_ids)
        pull_request = self.__get_pull_request(pull_request)
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers(_usr, pull_request)
            Session().add(reviewer)

        # only the newly added reviewers get a notification
        self.notify_reviewers(pull_request, ids_to_add)

        for uid in ids_to_remove:
            changed = True
            reviewer = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .scalar()
            if reviewer:
                Session().delete(reviewer)
        # bump the modification timestamp only when membership changed
        if changed:
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        return ids_to_add, ids_to_remove
750 750
751 751 def get_url(self, pull_request):
752 return url('pullrequest_show', repo_name=self.target_repo.repo_name,
752 return h.url('pullrequest_show', repo_name=self.target_repo.repo_name,
753 753 pull_request_id=self.pull_request_id,
754 754 qualified=True)
755 755
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails for the given reviewers of a
        pull request. A no-op when `reviewers_ids` is empty.

        :param pull_request: the `PullRequest` the notification is about
        :param reviewers_ids: iterable of user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template context shared by the email body and the notification
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
820 820
    def delete(self, pull_request):
        """
        Delete a pull request and clean up its server-side merge workspace.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        Session().delete(pull_request)
825 825
    def close_pull_request(self, pull_request, user):
        """
        Mark a pull request as closed, clean up its merge workspace, fire
        the 'close' hook and log the user action.

        :param pull_request: pull request (instance or id)
        :param user: user performing the close (recorded in the journal)
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')
        self._log_action('user_closed_pull_request', user, pull_request)
835 835
    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        """
        Close a pull request by adding a rejecting status-change comment
        and setting the changeset status to "rejected".

        :param pull_request: the `PullRequest` to close
        :param user: user performing the close
        :param repo: repository the closing comment is attached to
        :param message: optional custom message; a default status-change
            message is used when omitted
        """
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        # comment doubles as the status-change record (closing_pr=True)
        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)
870 870
    def merge_status(self, pull_request):
        """
        Determine whether the pull request can currently be merged
        server-side.

        :returns: ``(possible, message)`` tuple - a boolean merge
            possibility and a human readable explanation
        """
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        # cheap compatibility checks (e.g. largefiles) before the dry run
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(pull_request)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            # backend has no server-side merge support
            status = False, _('Pull request merging is not supported.')

        return status
889 889
    def _check_repo_requirements(self, target, source):
        """
        Check if `target` and `source` have compatible requirements.

        Currently this is just checking for largefiles.

        :returns: ``(merge_possible, message)`` tuple
        """
        target_has_largefiles = self._has_largefiles(target)
        source_has_largefiles = self._has_largefiles(source)
        merge_possible = True
        message = u''

        # a merge is only refused when the two sides disagree on largefiles
        if target_has_largefiles != source_has_largefiles:
            merge_possible = False
            if source_has_largefiles:
                message = _(
                    'Target repository large files support is disabled.')
            else:
                message = _(
                    'Source repository large files support is disabled.')

        return merge_possible, message
911 911
    def _has_largefiles(self, repo):
        """
        Return True when the repository has the 'largefiles' extension
        enabled in its vcs settings.
        """
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active
916 916
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.

        Uses the cached merge state when neither side moved since the last
        dry run; otherwise performs a fresh dry-run merge.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        # a locked target repo can never be merged into
        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # nothing moved - answer from the cached last merge status
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)
        log.debug("Merge response: %s", merge_state)
        return merge_state
944 944
945 945 def _refresh_reference(self, reference, vcs_repository):
946 946 if reference.type in ('branch', 'book'):
947 947 name_or_id = reference.name
948 948 else:
949 949 name_or_id = reference.commit_id
950 950 refreshed_commit = vcs_repository.get_commit(name_or_id)
951 951 refreshed_reference = Reference(
952 952 reference.type, reference.name, refreshed_commit.raw_id)
953 953 return refreshed_reference
954 954
955 955 def _needs_merge_state_refresh(self, pull_request, target_reference):
956 956 return not(
957 957 pull_request.revisions and
958 958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
959 959 target_reference.commit_id == pull_request._last_merge_target_rev)
960 960
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge and cache the result on the pull request so
        later status checks can reuse it without another dry run.

        :returns: the `MergeResponse` from the vcs backend
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = pull_request.\
                source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = (
                merge_state.failure_reason)
            Session().add(pull_request)
            Session().flush()

        return merge_state
980 980
981 981 def _workspace_id(self, pull_request):
982 982 workspace_id = 'pr-%s' % pull_request.pull_request_id
983 983 return workspace_id
984 984
985 985 def merge_status_message(self, status_code):
986 986 """
987 987 Return a human friendly error message for the given merge status code.
988 988 """
989 989 return self.MERGE_STATUS_MESSAGES[status_code]
990 990
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        """
        Build the JSON-serializable structure the pull request creation UI
        needs for a repository: owner info, description and the selectable
        refs (including a select2-shaped variant).

        :param repo: repository database object
        :param commit_id: optional commit id to pre-select
        :param branch: optional branch name to pre-select
        :param bookmark: optional bookmark name to pre-select
        :returns: dict with 'user', 'description' and 'refs' keys
        """
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        # reshape the refs into the nested format the select2 widget expects
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.firstname,
                'lastname': repo.user.lastname,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            # only the first line of the description is exposed
            'description': h.chop_at_smart(repo.description, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1018 1018
1019 1019 def generate_pullrequest_title(self, source, source_ref, target):
1020 1020 return '{source}#{at_ref} to {target}'.format(
1021 1021 source=source,
1022 1022 at_ref=source_ref,
1023 1023 target=target,
1024 1024 )
1025 1025
1026 1026 def _cleanup_merge_workspace(self, pull_request):
1027 1027 # Merging related cleanup
1028 1028 target_scm = pull_request.target_repo.scm_instance()
1029 1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1030 1030
1031 1031 try:
1032 1032 target_scm.cleanup_merge_workspace(workspace_id)
1033 1033 except NotImplementedError:
1034 1034 pass
1035 1035
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: ``(groups, selected)`` where groups is a list of
            ``([(ref_key, ref_name), ...], group_label)`` tuples and selected
            is the ``type:name:id`` key of the pre-selected ref
        :raises CommitDoesNotExistError: a wanted ref was given but not found
        :raises EmptyRepositoryError: no ref wanted and the repo has no commits
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # a ref is selected when the wanted commit_id or this
                    # source's match value equals its name or id
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            # nothing matched: an explicitly wanted ref is an error; otherwise
            # fall back to the default branch, then the newest commit
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected
1098 1098
    def get_diff(self, pull_request, context=DIFF_CONTEXT):
        """
        Return the vcs diff for the given pull request (instance or id).

        :param context: number of context lines around each diff hunk
        """
        pull_request = self.__get_pull_request(pull_request)
        return self._get_diff_from_pr_or_version(pull_request, context=context)
1102 1102
    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        """
        Compute the diff of a pull request (or one of its versions) by
        diffing its target and source refs inside the source repository.

        :param pr_or_version: object exposing ``source_repo``,
            ``source_ref_parts`` and ``target_ref_parts``
        :param context: number of context lines for the diff
        :return: the vcs diff object
        """
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1130 1130
1131 1131 def _is_merge_enabled(self, pull_request):
1132 1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 1133 settings = settings_model.get_general_settings()
1134 1134 return settings.get('rhodecode_pr_merge_enabled', False)
1135 1135
1136 1136 def _use_rebase_for_merging(self, pull_request):
1137 1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1138 1138 settings = settings_model.get_general_settings()
1139 1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1140 1140
1141 1141 def _log_action(self, action, user, pull_request):
1142 1142 action_logger(
1143 1143 user,
1144 1144 '{action}:{pr_id}'.format(
1145 1145 action=action, pr_id=pull_request.pull_request_id),
1146 1146 pull_request.target_repo)
1147 1147
1148 1148
# Commit-level result of comparing two revisions of a pull request:
# commits newly added, still present, and no longer present.
ChangeTuple = namedtuple('ChangeTuple', 'added common removed')

# File-level summary of a diff: paths added, modified and removed.
FileChangeTuple = namedtuple('FileChangeTuple', 'added modified removed')
@@ -1,934 +1,934 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime
32 32
33 33 from sqlalchemy.sql import func
34 34 from sqlalchemy.sql.expression import true, or_
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 40 from rhodecode.lib.caching_query import FromCache
41 41 from rhodecode.lib.exceptions import AttachedForksError
42 42 from rhodecode.lib.hooks_base import log_delete_repository
43 43 from rhodecode.lib.utils import make_db_config
44 44 from rhodecode.lib.utils2 import (
45 45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 47 from rhodecode.lib.vcs.backends import get_backend
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 52 RepoGroup, RepositoryField)
53 53 from rhodecode.model.scm import UserGroupList
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoModel(BaseModel):
61 61
62 62 cls = Repository
63 63
    def _get_user_group(self, users_group):
        # resolve a UserGroup from an instance, id or its group name
        return self._get_instance(UserGroup, users_group,
                                  callback=UserGroup.get_by_group_name)
67 67
    def _get_repo_group(self, repo_group):
        # resolve a RepoGroup from an instance, id or its group name
        return self._get_instance(RepoGroup, repo_group,
                                  callback=RepoGroup.get_by_group_name)
71 71
72 72 def _create_default_perms(self, repository, private):
73 73 # create default permission
74 74 default = 'repository.read'
75 75 def_user = User.get_default_user()
76 76 for p in def_user.user_perms:
77 77 if p.permission.permission_name.startswith('repository.'):
78 78 default = p.permission.permission_name
79 79 break
80 80
81 81 default_perm = 'repository.none' if private else default
82 82
83 83 repo_to_perm = UserRepoToPerm()
84 84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85 85
86 86 repo_to_perm.repository = repository
87 87 repo_to_perm.user_id = def_user.user_id
88 88
89 89 return repo_to_perm
90 90
    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database.

        Cached per model instance by ``LazyProperty``.
        """
        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
98 98
    def get(self, repo_id, cache=False):
        """
        Fetch a Repository by primary key.

        :param repo_id: repository id
        :param cache: when True, read through the short-lived sql cache region
        """
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_id == repo_id)

        if cache:
            repo = repo.options(FromCache("sql_cache_short",
                                          "get_repo_%s" % repo_id))
        return repo.scalar()
107 107
    def get_repo(self, repository):
        # thin public wrapper; accepts an instance, repo id or repo name
        return self._get_repo(repository)
110 110
    def get_by_repo_name(self, repo_name, cache=False):
        """
        Fetch a Repository by its full name.

        :param repo_name: full repository name (including group path)
        :param cache: when True, read through the short-lived sql cache region
        """
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_name == repo_name)

        if cache:
            repo = repo.options(FromCache("sql_cache_short",
                                          "get_repo_%s" % repo_name))
        return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts repo_name by id from special urls.
130 130 Example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135 try:
136 136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 137 if _repo_id:
138 138 return self.get(_repo_id)
139 139 except Exception:
140 140 log.exception('Failed to extract repo_name from URL')
141 141
142 142 return None
143 143
144 144 def get_url(self, repo):
145 return url('summary_home', repo_name=repo.repo_name, qualified=True)
145 return h.url('summary_home', repo_name=repo.repo_name, qualified=True)
146 146
    def get_users(self, name_contains=None, limit=20, only_active=True):
        """
        Return users as dicts for the user-picker widgets, optionally
        filtered by a case-insensitive substring of name/lastname/username.

        :param name_contains: substring filter, or None for no filter
        :param limit: maximum number of users returned
        :param only_active: restrict to active users
        """
        # TODO: mikhail: move this method to the UserModel.
        query = self.sa.query(User)
        if only_active:
            query = query.filter(User.active == true())

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                or_(
                    User.name.ilike(ilike_expression),
                    User.lastname.ilike(ilike_expression),
                    User.username.ilike(ilike_expression)
                )
            )
        query = query.limit(limit)
        users = query.all()

        _users = [
            {
                'id': user.user_id,
                'first_name': user.name,
                'last_name': user.lastname,
                'username': user.username,
                'icon_link': h.gravatar_url(user.email, 14),
                'value_display': h.person(user.email),
                'value': user.username,
                'value_type': 'user',
                'active': user.active,
            }
            for user in users
        ]
        return _users
180 180
    def get_user_groups(self, name_contains=None, limit=20, only_active=True):
        """
        Return user groups as dicts for the picker widgets, filtered by an
        optional name substring and restricted to groups the current
        context may read/write/admin (via ``UserGroupList``).

        :param name_contains: substring filter, or None for no filter
        :param limit: maximum number of groups returned
        :param only_active: restrict to active user groups
        """
        # TODO: mikhail: move this method to the UserGroupModel.
        query = self.sa.query(UserGroup)
        if only_active:
            query = query.filter(UserGroup.users_group_active == true())

        if name_contains:
            # shortest matching names first
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                UserGroup.users_group_name.ilike(ilike_expression))\
                .order_by(func.length(UserGroup.users_group_name))\
                .order_by(UserGroup.users_group_name)

        query = query.limit(limit)
        user_groups = query.all()
        perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
        user_groups = UserGroupList(user_groups, perm_set=perm_set)

        _groups = [
            {
                'id': group.users_group_id,
                # TODO: marcink figure out a way to generate the url for the
                # icon
                'icon_link': '',
                'value_display': 'Group: %s (%d members)' % (
                    group.users_group_name, len(group.members),),
                'value': group.users_group_name,
                'value_type': 'user_group',
                'active': group.users_group_active,
            }
            for group in user_groups
        ]
        return _groups
214 214
215 215 @classmethod
216 216 def update_repoinfo(cls, repositories=None):
217 217 if not repositories:
218 218 repositories = Repository.getAll()
219 219 for repo in repositories:
220 220 repo.update_commit_cache()
221 221
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False):
        """
        Render the given repositories into row dicts for the repo data
        tables; each cell is pre-rendered html via ``PartialRenderer``.

        :param repo_list: iterable of db repositories to render
        :param admin: when True include the per-repo "action" cell and
            show full (non-shortened) names
        :param super_user_actions: passed to the repo_actions renderer
        """
        from rhodecode.lib.utils import PartialRenderer
        _render = PartialRenderer('data_table/_dt_elements.html')
        c = _render.c

        # small closures that render individual table cells
        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, fork_of):
            return _render('repo_name', name, rtype, rstate, private, fork_of,
                           short_name=not admin, admin=False)

        def last_change(last_change):
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'))

        def desc(desc):
            if c.visual.stylify_metatags:
                return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
            else:
                return h.urlify_text(h.html_escape(h.truncate(desc, 60)))

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            cs_cache = repo.changeset_cache
            # *_raw keys hold sortable values next to the rendered html
            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.repo_state, repo.private, repo.fork),
                "name_raw": repo.repo_name.lower(),

                "last_change": last_change(repo.last_db_change),
                "last_change_raw": datetime_to_time(repo.last_db_change),

                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_changeset_raw": cs_cache.get('revision'),

                "desc": desc(repo.description),
                "owner": user_profile(repo.user.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),

                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
296 296
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name: full repository name
        :return: dict of form defaults, or None when the repo is unknown
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # form field name -> whether the db attribute drops the 'repo_' prefix
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                # landing rev is stored split; the form expects 'type:ref'
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        # fill repository users
        for p in repo_info.repo_to_perm:
            defaults.update({'u_perm_%s' % p.user.user_id:
                             p.permission.permission_name})

        # fill repository groups
        for p in repo_info.users_group_to_perm:
            defaults.update({'g_perm_%s' % p.users_group.users_group_id:
                             p.permission.permission_name})

        return defaults
360 360
    def update(self, repo, **kwargs):
        """
        Update a repository's db record from form data in ``kwargs``,
        renaming it on the filesystem when the name changed.

        :param repo: instance, id or name of the repository to update
        :param kwargs: form values; must include 'repo_name', may include
            'user', 'repo_group', the keys listed in ``update_keys`` and
            extra fields prefixed with ``RepositoryField.PREFIX``
        :return: the updated repository
        :raises Exception: re-raised after logging on any failure
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip 'repo_' prefix?, form key) pairs mapped onto attributes
            update_keys = [
                (1, 'repo_enable_downloads'),
                (1, 'repo_description'),
                (1, 'repo_enable_locking'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')
                    if k == 'clone_uri':
                        from rhodecode.model.validators import Missing
                        _change = kwargs.get('clone_uri_change')
                        if _change in [Missing, 'OLD']:
                            # we don't change the value, so use original one
                            val = cur_repo.clone_uri

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
426 426
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        Fires ``RepoPreCreateEvent`` before the db add and
        ``RepoCreatedEvent`` after a successful flush; permissions are
        copied from the fork parent or the repo group when requested,
        otherwise the default-user permission is created.
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                # the parent group's locking setting wins over the argument
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private
                # repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                # translate group-level permission names to repo-level ones
                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreatedEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
544 544
    def create(self, form_data, cur_user):
        """
        Create repository using celery tasks

        :param form_data: form data describing the new repository
        :param cur_user: user performing the creation
        :return: the celery task result object from ``run_task``
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo, form_data, cur_user)
554 554
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply a batch of permission changes on *repo*.

        Each list entry is a ``(member_id, perm, member_type)`` triple where
        ``member_type`` is 'user' or anything else for a user group. Group
        changes are skipped unless ``cur_user`` holds one of ``req_perms``
        on the group (or ``check_perms`` is False).

        :param perm_additions: permissions to add
        :param perm_updates: permissions to update in place
        :param perm_deletions: permissions to remove (perm value unused)
        :param check_perms: verify cur_user may alter the user groups
        :param cur_user: user performing the change
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)

        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                self.revoke_user_permission(repo=repo, user=member_id)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
608 608
    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper into executing celery task for fork creation

        :param form_data: form data describing the fork
        :param cur_user: user performing the fork
        :return: the celery task result object from ``run_task``
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)
618 618
619 619 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
620 620 """
621 621 Delete given repository, forks parameter defines what do do with
622 622 attached forks. Throws AttachedForksError if deleted repo has attached
623 623 forks
624 624
625 625 :param repo:
626 626 :param forks: str 'delete' or 'detach'
627 627 :param fs_remove: remove(archive) repo from filesystem
628 628 """
629 629 if not cur_user:
630 630 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
631 631 repo = self._get_repo(repo)
632 632 if repo:
633 633 if forks == 'detach':
634 634 for r in repo.forks:
635 635 r.fork = None
636 636 self.sa.add(r)
637 637 elif forks == 'delete':
638 638 for r in repo.forks:
639 639 self.delete(r, forks='delete')
640 640 elif [f for f in repo.forks]:
641 641 raise AttachedForksError()
642 642
643 643 old_repo_dict = repo.get_dict()
644 644 events.trigger(events.RepoPreDeleteEvent(repo))
645 645 try:
646 646 self.sa.delete(repo)
647 647 if fs_remove:
648 648 self._delete_filesystem_repo(repo)
649 649 else:
650 650 log.debug('skipping removal from filesystem')
651 651 old_repo_dict.update({
652 652 'deleted_by': cur_user,
653 653 'deleted_on': time.time(),
654 654 })
655 655 log_delete_repository(**old_repo_dict)
656 656 events.trigger(events.RepoDeletedEvent(repo))
657 657 except Exception:
658 658 log.error(traceback.format_exc())
659 659 raise
660 660
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        :return: the created or updated UserRepoToPerm row
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj
691 691
    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        No-op when the user holds no permission on the repository.

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.repository == repo) \
            .filter(UserRepoToPerm.user == user) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s', repo, user)
            action_logger_generic(
                'revoked permission from user: {} on repo: {}'.format(
                    user, repo), namespace='security.repo')
713 713
    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        :return: the created or updated UserGroupRepoToPerm row
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj
748 748
749 749 def revoke_user_group_permission(self, repo, group_name):
750 750 """
751 751 Revoke permission for user group on given repository
752 752
753 753 :param repo: Instance of Repository, repository_id, or repository name
754 754 :param group_name: Instance of UserGroup, users_group_id,
755 755 or user group name
756 756 """
757 757 repo = self._get_repo(repo)
758 758 group_name = self._get_user_group(group_name)
759 759
760 760 obj = self.sa.query(UserGroupRepoToPerm) \
761 761 .filter(UserGroupRepoToPerm.repository == repo) \
762 762 .filter(UserGroupRepoToPerm.users_group == group_name) \
763 763 .scalar()
764 764 if obj:
765 765 self.sa.delete(obj)
766 766 log.debug('Revoked perm to %s on %s', repo, group_name)
767 767 action_logger_generic(
768 768 'revoked permission from usergroup: {} on repo: {}'.format(
769 769 group_name, repo), namespace='security.repo')
770 770
771 771 def delete_stats(self, repo_name):
772 772 """
773 773 removes stats for given repo
774 774
775 775 :param repo_name:
776 776 """
777 777 repo = self._get_repo(repo_name)
778 778 try:
779 779 obj = self.sa.query(Statistics) \
780 780 .filter(Statistics.repository == repo).scalar()
781 781 if obj:
782 782 self.sa.delete(obj)
783 783 except Exception:
784 784 log.error(traceback.format_exc())
785 785 raise
786 786
787 787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
788 788 field_type='str', field_desc=''):
789 789
790 790 repo = self._get_repo(repo_name)
791 791
792 792 new_field = RepositoryField()
793 793 new_field.repository = repo
794 794 new_field.field_key = field_key
795 795 new_field.field_type = field_type # python type
796 796 new_field.field_value = field_value
797 797 new_field.field_desc = field_desc
798 798 new_field.field_label = field_label
799 799 self.sa.add(new_field)
800 800 return new_field
801 801
802 802 def delete_repo_field(self, repo_name, field_key):
803 803 repo = self._get_repo(repo_name)
804 804 field = RepositoryField.get_by_key_name(field_key, repo)
805 805 if field:
806 806 self.sa.delete(field)
807 807
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: plain repository name; must NOT contain group
            separators (groups are passed via ``repo_group``)
        :param repo_type: scm backend alias, e.g. 'git' or 'hg'
        :param repo_group: RepoGroup instance, or a group path prefix string
            ('' / None for top level)
        :param clone_uri: optional source URL to clone initial content from
        :param repo_store_location: optional override used as the full target
            path instead of ``repos_path/group/repo_name``
        :param use_global_config: when True, the vcs config is not scoped to
            this repository
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        # nested names must come in through repo_group, never inline
        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        # repo_store_location, when given, is used verbatim as the target path
        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # scope the vcs config to the full (group-qualified) repo name,
        # unless the caller asked for the global config
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                bare=True)
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri)

        ScmModel().install_hooks(repo, repo_type=repo_type)

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo
880 880
881 881 def _rename_filesystem_repo(self, old, new):
882 882 """
883 883 renames repository on filesystem
884 884
885 885 :param old: old name
886 886 :param new: new name
887 887 """
888 888 log.info('renaming repo from %s to %s', old, new)
889 889
890 890 old_path = os.path.join(self.repos_path, old)
891 891 new_path = os.path.join(self.repos_path, new)
892 892 if os.path.isdir(new_path):
893 893 raise Exception(
894 894 'Was trying to rename to already existing dir %s' % new_path
895 895 )
896 896 shutil.move(old_path, new_path)
897 897
898 898 def _delete_filesystem_repo(self, repo):
899 899 """
900 900 removes repo from filesystem, the removal is acctually made by
901 901 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
902 902 repository is no longer valid for rhodecode, can be undeleted later on
903 903 by reverting the renames on this repository
904 904
905 905 :param repo: repo object
906 906 """
907 907 rm_path = os.path.join(self.repos_path, repo.repo_name)
908 908 repo_group = repo.group
909 909 log.info("Removing repository %s", rm_path)
910 910 # disable hg/git internal that it doesn't get detected as repo
911 911 alias = repo.repo_type
912 912
913 913 config = make_db_config(clear_session=False)
914 914 config.set('extensions', 'largefiles', '')
915 915 bare = getattr(repo.scm_instance(config=config), 'bare', False)
916 916
917 917 # skip this for bare git repos
918 918 if not bare:
919 919 # disable VCS repo
920 920 vcs_path = os.path.join(rm_path, '.%s' % alias)
921 921 if os.path.exists(vcs_path):
922 922 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
923 923
924 924 _now = datetime.now()
925 925 _ms = str(_now.microsecond).rjust(6, '0')
926 926 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
927 927 repo.just_name)
928 928 if repo_group:
929 929 # if repository is in group, prefix the removal path with the group
930 930 args = repo_group.full_path_splitted + [_d]
931 931 _d = os.path.join(*args)
932 932
933 933 if os.path.isdir(rm_path):
934 934 shutil.move(rm_path, os.path.join(self.repos_path, _d))
@@ -1,61 +1,78 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests.events.conftest import EventCatcher
24 24
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.events import (
27 27 PullRequestCreateEvent,
28 28 PullRequestUpdateEvent,
29 29 PullRequestReviewEvent,
30 30 PullRequestMergeEvent,
31 31 PullRequestCloseEvent,
32 32 )
33 33
# TODO: dan: make the serialization tests complete json comparisons
@pytest.mark.backends("git", "hg")
@pytest.mark.parametrize('EventClass', [
    PullRequestCreateEvent,
    PullRequestUpdateEvent,
    PullRequestReviewEvent,
    PullRequestMergeEvent,
    PullRequestCloseEvent,
])
def test_pullrequest_events_serialized(pr_util, EventClass):
    # every PR event must serialize its name, repo and pullrequest sections
    pull_request = pr_util.create_pull_request()
    serialized = EventClass(pull_request).as_dict()
    assert serialized['name'] == EventClass.name
    assert serialized['repo']['repo_name'] == \
        pull_request.target_repo.repo_name
    assert serialized['pullrequest']['pull_request_id'] == \
        pull_request.pull_request_id
34 51
@pytest.mark.backends("git", "hg")
def test_create_pull_request_events(pr_util):
    # creating a PR should fire PullRequestCreateEvent
    with EventCatcher() as catcher:
        pr_util.create_pull_request()

    assert PullRequestCreateEvent in catcher.events_types
42 59
@pytest.mark.backends("git", "hg")
def test_close_pull_request_events(pr_util, user_admin):
    # closing a PR should fire PullRequestCloseEvent
    pull_request = pr_util.create_pull_request()

    with EventCatcher() as catcher:
        PullRequestModel().close_pull_request(pull_request, user_admin)

    assert PullRequestCloseEvent in catcher.events_types
52 69
@pytest.mark.backends("git", "hg")
def test_close_pull_request_with_comment_events(pr_util, user_admin):
    # closing a PR with a comment should still fire PullRequestCloseEvent
    pull_request = pr_util.create_pull_request()

    with EventCatcher() as catcher:
        PullRequestModel().close_pull_request_with_comment(
            pull_request, user_admin, pull_request.target_repo)

    assert PullRequestCloseEvent in catcher.events_types
@@ -1,79 +1,113 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests.events.conftest import EventCatcher
24 24
25 25 from rhodecode.lib import hooks_base, utils2
26 26 from rhodecode.model.repo import RepoModel
27 27 from rhodecode.events.repo import (
28 28 RepoPrePullEvent, RepoPullEvent,
29 29 RepoPrePushEvent, RepoPushEvent,
30 30 RepoPreCreateEvent, RepoCreatedEvent,
31 31 RepoPreDeleteEvent, RepoDeletedEvent,
32 32 )
33 33
34 34
@pytest.fixture
def scm_extras(user_regular, repo_stub):
    """Minimal scm hook extras dict, mimicking what the vcs hooks receive."""
    return utils2.AttributeDict({
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'action': '',
        'repository': repo_stub.repo_name,
        'scm': repo_stub.scm_instance().alias,
        'config': '',
        'server_url': 'http://example.com',
        'make_lock': None,
        'locked_by': [None],
        'commit_ids': ['a' * 40] * 3,
    })
50 50
51 51
# TODO: dan: make the serialization tests complete json comparisons
@pytest.mark.parametrize('EventClass', [
    RepoPreCreateEvent, RepoCreatedEvent,
    RepoPreDeleteEvent, RepoDeletedEvent,
])
def test_repo_events_serialized(repo_stub, EventClass):
    # repo lifecycle events serialize their name and repo section
    serialized = EventClass(repo_stub).as_dict()
    assert serialized['name'] == EventClass.name
    assert serialized['repo']['repo_name'] == repo_stub.repo_name
63
@pytest.mark.parametrize('EventClass', [
    RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
])
def test_vcs_repo_events_serialize(repo_stub, scm_extras, EventClass):
    # vcs events built from repo_name + extras serialize repo info too
    serialized = EventClass(
        repo_name=repo_stub.repo_name, extras=scm_extras).as_dict()
    assert serialized['name'] == EventClass.name
    assert serialized['repo']['repo_name'] == repo_stub.repo_name
73
74
@pytest.mark.parametrize('EventClass', [RepoPushEvent])
def test_vcs_repo_push_event_serialize(repo_stub, scm_extras, EventClass):
    """
    Serialization test for RepoPushEvent.

    Renamed: this function previously reused the name
    ``test_vcs_repo_events_serialize``, which shadowed the earlier
    definition of that name so pytest only ever collected one of the two
    tests. A unique name makes both run.
    """
    event = EventClass(repo_name=repo_stub.repo_name,
                       pushed_commit_ids=scm_extras['commit_ids'],
                       extras=scm_extras)
    data = event.as_dict()
    assert data['name'] == EventClass.name
    assert data['repo']['repo_name'] == repo_stub.repo_name
84
def test_create_delete_repo_fires_events(backend):
    # repo creation fires pre-create then created, in order
    with EventCatcher() as catcher:
        repo = backend.create_repo()
    assert catcher.events_types == [RepoPreCreateEvent, RepoCreatedEvent]

    # repo deletion fires pre-delete then deleted, in order
    with EventCatcher() as catcher:
        RepoModel().delete(repo)
    assert catcher.events_types == [RepoPreDeleteEvent, RepoDeletedEvent]
61 94
def test_push_fires_events(scm_extras):
    """
    Push hooks fire the pre/post push events.

    Name fixed: this body exercises the *push* hooks but was previously
    named ``test_pull_fires_events`` (swapped with the test below).
    """
    with EventCatcher() as event_catcher:
        hooks_base.pre_push(scm_extras)
    assert event_catcher.events_types == [RepoPrePushEvent]

    with EventCatcher() as event_catcher:
        hooks_base.post_push(scm_extras)
    assert event_catcher.events_types == [RepoPushEvent]


def test_pull_fires_events(scm_extras):
    """
    Pull hooks fire the pre/post pull events.

    Name fixed: this body exercises the *pull* hooks but was previously
    named ``test_push_fires_events`` (swapped with the test above).
    """
    with EventCatcher() as event_catcher:
        hooks_base.pre_pull(scm_extras)
    assert event_catcher.events_types == [RepoPrePullEvent]

    with EventCatcher() as event_catcher:
        hooks_base.post_pull(scm_extras)
    assert event_catcher.events_types == [RepoPullEvent]
113
General Comments 0
You need to be logged in to leave comments. Login now