##// END OF EJS Templates
events: fix bugs with serialization of repo/pr events and add tests for those cases
dan -
r389:06163eeb default
parent child Browse files
Show More
@@ -1,387 +1,388 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pylons middleware initialization
22 Pylons middleware initialization
23 """
23 """
24 import logging
24 import logging
25
25
26 from paste.registry import RegistryManager
26 from paste.registry import RegistryManager
27 from paste.gzipper import make_gzip_middleware
27 from paste.gzipper import make_gzip_middleware
28 from pylons.wsgiapp import PylonsApp
28 from pylons.wsgiapp import PylonsApp
29 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.authorization import ACLAuthorizationPolicy
30 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.static import static_view
31 from pyramid.static import static_view
32 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
35 import pyramid.httpexceptions as httpexceptions
35 import pyramid.httpexceptions as httpexceptions
36 from pyramid.renderers import render_to_response, render
36 from pyramid.renderers import render_to_response, render
37 from routes.middleware import RoutesMiddleware
37 from routes.middleware import RoutesMiddleware
38 import routes.util
38 import routes.util
39
39
40 import rhodecode
40 import rhodecode
41 from rhodecode.config import patches
41 from rhodecode.config import patches
42 from rhodecode.config.environment import (
42 from rhodecode.config.environment import (
43 load_environment, load_pyramid_environment)
43 load_environment, load_pyramid_environment)
44 from rhodecode.lib.middleware import csrf
44 from rhodecode.lib.middleware import csrf
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 from rhodecode.lib.middleware.vcs import VCSMiddleware
48 from rhodecode.lib.middleware.vcs import VCSMiddleware
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50
50
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
56 """Create a Pylons WSGI application and return it
56 """Create a Pylons WSGI application and return it
57
57
58 ``global_conf``
58 ``global_conf``
59 The inherited configuration for this application. Normally from
59 The inherited configuration for this application. Normally from
60 the [DEFAULT] section of the Paste ini file.
60 the [DEFAULT] section of the Paste ini file.
61
61
62 ``full_stack``
62 ``full_stack``
63 Whether or not this application provides a full WSGI stack (by
63 Whether or not this application provides a full WSGI stack (by
64 default, meaning it handles its own exceptions and errors).
64 default, meaning it handles its own exceptions and errors).
65 Disable full_stack when this application is "managed" by
65 Disable full_stack when this application is "managed" by
66 another WSGI middleware.
66 another WSGI middleware.
67
67
68 ``app_conf``
68 ``app_conf``
69 The application's local configuration. Normally specified in
69 The application's local configuration. Normally specified in
70 the [app:<name>] section of the Paste ini file (where <name>
70 the [app:<name>] section of the Paste ini file (where <name>
71 defaults to main).
71 defaults to main).
72
72
73 """
73 """
74 # Apply compatibility patches
74 # Apply compatibility patches
75 patches.kombu_1_5_1_python_2_7_11()
75 patches.kombu_1_5_1_python_2_7_11()
76 patches.inspect_getargspec()
76 patches.inspect_getargspec()
77
77
78 # Configure the Pylons environment
78 # Configure the Pylons environment
79 config = load_environment(global_conf, app_conf)
79 config = load_environment(global_conf, app_conf)
80
80
81 # The Pylons WSGI app
81 # The Pylons WSGI app
82 app = PylonsApp(config=config)
82 app = PylonsApp(config=config)
83 if rhodecode.is_test:
83 if rhodecode.is_test:
84 app = csrf.CSRFDetector(app)
84 app = csrf.CSRFDetector(app)
85
85
86 expected_origin = config.get('expected_origin')
86 expected_origin = config.get('expected_origin')
87 if expected_origin:
87 if expected_origin:
88 # The API can be accessed from other Origins.
88 # The API can be accessed from other Origins.
89 app = csrf.OriginChecker(app, expected_origin,
89 app = csrf.OriginChecker(app, expected_origin,
90 skip_urls=[routes.util.url_for('api')])
90 skip_urls=[routes.util.url_for('api')])
91
91
92
92
93 if asbool(full_stack):
93 if asbool(full_stack):
94
94
95 # Appenlight monitoring and error handler
95 # Appenlight monitoring and error handler
96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
97
97
98 # we want our low level middleware to get to the request ASAP. We don't
98 # we want our low level middleware to get to the request ASAP. We don't
99 # need any pylons stack middleware in them
99 # need any pylons stack middleware in them
100 app = VCSMiddleware(app, config, appenlight_client)
100 app = VCSMiddleware(app, config, appenlight_client)
101
101
102 # Establish the Registry for this application
102 # Establish the Registry for this application
103 app = RegistryManager(app)
103 app = RegistryManager(app)
104
104
105 app.config = config
105 app.config = config
106
106
107 return app
107 return app
108
108
109
109
110 def make_pyramid_app(global_config, **settings):
110 def make_pyramid_app(global_config, **settings):
111 """
111 """
112 Constructs the WSGI application based on Pyramid and wraps the Pylons based
112 Constructs the WSGI application based on Pyramid and wraps the Pylons based
113 application.
113 application.
114
114
115 Specials:
115 Specials:
116
116
117 * We migrate from Pylons to Pyramid. While doing this, we keep both
117 * We migrate from Pylons to Pyramid. While doing this, we keep both
118 frameworks functional. This involves moving some WSGI middlewares around
118 frameworks functional. This involves moving some WSGI middlewares around
119 and providing access to some data internals, so that the old code is
119 and providing access to some data internals, so that the old code is
120 still functional.
120 still functional.
121
121
122 * The application can also be integrated like a plugin via the call to
122 * The application can also be integrated like a plugin via the call to
123 `includeme`. This is accompanied with the other utility functions which
123 `includeme`. This is accompanied with the other utility functions which
124 are called. Changing this should be done with great care to not break
124 are called. Changing this should be done with great care to not break
125 cases when these fragments are assembled from another place.
125 cases when these fragments are assembled from another place.
126
126
127 """
127 """
128 # The edition string should be available in pylons too, so we add it here
128 # The edition string should be available in pylons too, so we add it here
129 # before copying the settings.
129 # before copying the settings.
130 settings.setdefault('rhodecode.edition', 'Community Edition')
130 settings.setdefault('rhodecode.edition', 'Community Edition')
131
131
132 # As long as our Pylons application does expect "unprepared" settings, make
132 # As long as our Pylons application does expect "unprepared" settings, make
133 # sure that we keep an unmodified copy. This avoids unintentional change of
133 # sure that we keep an unmodified copy. This avoids unintentional change of
134 # behavior in the old application.
134 # behavior in the old application.
135 settings_pylons = settings.copy()
135 settings_pylons = settings.copy()
136
136
137 sanitize_settings_and_apply_defaults(settings)
137 sanitize_settings_and_apply_defaults(settings)
138 config = Configurator(settings=settings)
138 config = Configurator(settings=settings)
139 add_pylons_compat_data(config.registry, global_config, settings_pylons)
139 add_pylons_compat_data(config.registry, global_config, settings_pylons)
140
140
141 load_pyramid_environment(global_config, settings)
141 load_pyramid_environment(global_config, settings)
142
142
143 includeme(config)
143 includeme(config)
144 includeme_last(config)
144 includeme_last(config)
145 pyramid_app = config.make_wsgi_app()
145 pyramid_app = config.make_wsgi_app()
146 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
146 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
147 return pyramid_app
147 return pyramid_app
148
148
149
149
150 def add_pylons_compat_data(registry, global_config, settings):
150 def add_pylons_compat_data(registry, global_config, settings):
151 """
151 """
152 Attach data to the registry to support the Pylons integration.
152 Attach data to the registry to support the Pylons integration.
153 """
153 """
154 registry._pylons_compat_global_config = global_config
154 registry._pylons_compat_global_config = global_config
155 registry._pylons_compat_settings = settings
155 registry._pylons_compat_settings = settings
156
156
157
157
158 def webob_to_pyramid_http_response(webob_response):
158 def webob_to_pyramid_http_response(webob_response):
159 ResponseClass = httpexceptions.status_map[webob_response.status_int]
159 ResponseClass = httpexceptions.status_map[webob_response.status_int]
160 pyramid_response = ResponseClass(webob_response.status)
160 pyramid_response = ResponseClass(webob_response.status)
161 pyramid_response.status = webob_response.status
161 pyramid_response.status = webob_response.status
162 pyramid_response.headers.update(webob_response.headers)
162 pyramid_response.headers.update(webob_response.headers)
163 if pyramid_response.headers['content-type'] == 'text/html':
163 if pyramid_response.headers['content-type'] == 'text/html':
164 pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
164 pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
165 return pyramid_response
165 return pyramid_response
166
166
167
167
168 def error_handler(exception, request):
168 def error_handler(exception, request):
169 # TODO: dan: replace the old pylons error controller with this
169 # TODO: dan: replace the old pylons error controller with this
170 from rhodecode.model.settings import SettingsModel
170 from rhodecode.model.settings import SettingsModel
171 from rhodecode.lib.utils2 import AttributeDict
171 from rhodecode.lib.utils2 import AttributeDict
172
172
173 try:
173 try:
174 rc_config = SettingsModel().get_all_settings()
174 rc_config = SettingsModel().get_all_settings()
175 except Exception:
175 except Exception:
176 log.exception('failed to fetch settings')
176 log.exception('failed to fetch settings')
177 rc_config = {}
177 rc_config = {}
178
178
179 base_response = HTTPInternalServerError()
179 base_response = HTTPInternalServerError()
180 # prefer original exception for the response since it may have headers set
180 # prefer original exception for the response since it may have headers set
181 if isinstance(exception, HTTPError):
181 if isinstance(exception, HTTPError):
182 base_response = exception
182 base_response = exception
183
183
184 c = AttributeDict()
184 c = AttributeDict()
185 c.error_message = base_response.status
185 c.error_message = base_response.status
186 c.error_explanation = base_response.explanation or str(base_response)
186 c.error_explanation = base_response.explanation or str(base_response)
187 c.visual = AttributeDict()
187 c.visual = AttributeDict()
188
188
189 c.visual.rhodecode_support_url = (
189 c.visual.rhodecode_support_url = (
190 request.registry.settings.get('rhodecode_support_url') or
190 request.registry.settings.get('rhodecode_support_url') or
191 request.route_url('rhodecode_support')
191 request.route_url('rhodecode_support')
192 )
192 )
193 c.redirect_time = 0
193 c.redirect_time = 0
194 c.rhodecode_name = rc_config.get('rhodecode_title', '')
194 c.rhodecode_name = rc_config.get('rhodecode_title', '')
195 if not c.rhodecode_name:
195 if not c.rhodecode_name:
196 c.rhodecode_name = 'Rhodecode'
196 c.rhodecode_name = 'Rhodecode'
197
197
198 response = render_to_response(
198 response = render_to_response(
199 '/errors/error_document.html', {'c': c}, request=request,
199 '/errors/error_document.html', {'c': c}, request=request,
200 response=base_response)
200 response=base_response)
201
201
202 return response
202 return response
203
203
204
204
205 def includeme(config):
205 def includeme(config):
206 settings = config.registry.settings
206 settings = config.registry.settings
207
207
208 if asbool(settings.get('appenlight', 'false')):
208 if asbool(settings.get('appenlight', 'false')):
209 config.include('appenlight_client.ext.pyramid_tween')
209 config.include('appenlight_client.ext.pyramid_tween')
210
210
211 # Includes which are required. The application would fail without them.
211 # Includes which are required. The application would fail without them.
212 config.include('pyramid_mako')
212 config.include('pyramid_mako')
213 config.include('pyramid_beaker')
213 config.include('pyramid_beaker')
214 config.include('rhodecode.admin')
214 config.include('rhodecode.admin')
215 config.include('rhodecode.integrations')
215 config.include('rhodecode.authentication')
216 config.include('rhodecode.authentication')
216 config.include('rhodecode.login')
217 config.include('rhodecode.login')
217 config.include('rhodecode.tweens')
218 config.include('rhodecode.tweens')
218 config.include('rhodecode.api')
219 config.include('rhodecode.api')
219 config.add_route(
220 config.add_route(
220 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
221 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
221
222
222 # Set the authorization policy.
223 # Set the authorization policy.
223 authz_policy = ACLAuthorizationPolicy()
224 authz_policy = ACLAuthorizationPolicy()
224 config.set_authorization_policy(authz_policy)
225 config.set_authorization_policy(authz_policy)
225
226
226 # Set the default renderer for HTML templates to mako.
227 # Set the default renderer for HTML templates to mako.
227 config.add_mako_renderer('.html')
228 config.add_mako_renderer('.html')
228
229
229 # plugin information
230 # plugin information
230 config.registry.rhodecode_plugins = {}
231 config.registry.rhodecode_plugins = {}
231
232
232 config.add_directive(
233 config.add_directive(
233 'register_rhodecode_plugin', register_rhodecode_plugin)
234 'register_rhodecode_plugin', register_rhodecode_plugin)
234 # include RhodeCode plugins
235 # include RhodeCode plugins
235 includes = aslist(settings.get('rhodecode.includes', []))
236 includes = aslist(settings.get('rhodecode.includes', []))
236 for inc in includes:
237 for inc in includes:
237 config.include(inc)
238 config.include(inc)
238
239
239 pylons_app = make_app(
240 pylons_app = make_app(
240 config.registry._pylons_compat_global_config,
241 config.registry._pylons_compat_global_config,
241 **config.registry._pylons_compat_settings)
242 **config.registry._pylons_compat_settings)
242 config.registry._pylons_compat_config = pylons_app.config
243 config.registry._pylons_compat_config = pylons_app.config
243
244
244 pylons_app_as_view = wsgiapp(pylons_app)
245 pylons_app_as_view = wsgiapp(pylons_app)
245
246
246 # Protect from VCS Server error related pages when server is not available
247 # Protect from VCS Server error related pages when server is not available
247 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
248 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
248 if not vcs_server_enabled:
249 if not vcs_server_enabled:
249 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
250 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
250
251
251
252
252 def pylons_app_with_error_handler(context, request):
253 def pylons_app_with_error_handler(context, request):
253 """
254 """
254 Handle exceptions from rc pylons app:
255 Handle exceptions from rc pylons app:
255
256
256 - old webob type exceptions get converted to pyramid exceptions
257 - old webob type exceptions get converted to pyramid exceptions
257 - pyramid exceptions are passed to the error handler view
258 - pyramid exceptions are passed to the error handler view
258 """
259 """
259 try:
260 try:
260 response = pylons_app_as_view(context, request)
261 response = pylons_app_as_view(context, request)
261 if 400 <= response.status_int <= 599: # webob type error responses
262 if 400 <= response.status_int <= 599: # webob type error responses
262 return error_handler(
263 return error_handler(
263 webob_to_pyramid_http_response(response), request)
264 webob_to_pyramid_http_response(response), request)
264 except HTTPError as e: # pyramid type exceptions
265 except HTTPError as e: # pyramid type exceptions
265 return error_handler(e, request)
266 return error_handler(e, request)
266 except Exception:
267 except Exception:
267 if settings.get('debugtoolbar.enabled', False):
268 if settings.get('debugtoolbar.enabled', False):
268 raise
269 raise
269 return error_handler(HTTPInternalServerError(), request)
270 return error_handler(HTTPInternalServerError(), request)
270 return response
271 return response
271
272
272 # This is the glue which allows us to migrate in chunks. By registering the
273 # This is the glue which allows us to migrate in chunks. By registering the
273 # pylons based application as the "Not Found" view in Pyramid, we will
274 # pylons based application as the "Not Found" view in Pyramid, we will
274 # fallback to the old application each time the new one does not yet know
275 # fallback to the old application each time the new one does not yet know
275 # how to handle a request.
276 # how to handle a request.
276 config.add_notfound_view(pylons_app_with_error_handler)
277 config.add_notfound_view(pylons_app_with_error_handler)
277
278
278 if settings.get('debugtoolbar.enabled', False):
279 if settings.get('debugtoolbar.enabled', False):
279 # if toolbar, then only http type exceptions get caught and rendered
280 # if toolbar, then only http type exceptions get caught and rendered
280 ExcClass = HTTPError
281 ExcClass = HTTPError
281 else:
282 else:
282 # if no toolbar, then any exception gets caught and rendered
283 # if no toolbar, then any exception gets caught and rendered
283 ExcClass = Exception
284 ExcClass = Exception
284 config.add_view(error_handler, context=ExcClass)
285 config.add_view(error_handler, context=ExcClass)
285
286
286
287
287 def includeme_last(config):
288 def includeme_last(config):
288 """
289 """
289 The static file catchall needs to be last in the view configuration.
290 The static file catchall needs to be last in the view configuration.
290 """
291 """
291 settings = config.registry.settings
292 settings = config.registry.settings
292
293
293 # Note: johbo: I would prefer to register a prefix for static files at some
294 # Note: johbo: I would prefer to register a prefix for static files at some
294 # point, e.g. move them under '_static/'. This would fully avoid that we
295 # point, e.g. move them under '_static/'. This would fully avoid that we
295 # can have name clashes with a repository name. Imaging someone calling his
296 # can have name clashes with a repository name. Imaging someone calling his
296 # repo "css" ;-) Also having an external web server to serve out the static
297 # repo "css" ;-) Also having an external web server to serve out the static
297 # files seems to be easier to set up if they have a common prefix.
298 # files seems to be easier to set up if they have a common prefix.
298 #
299 #
299 # Example: config.add_static_view('_static', path='rhodecode:public')
300 # Example: config.add_static_view('_static', path='rhodecode:public')
300 #
301 #
301 # It might be an option to register both paths for a while and then migrate
302 # It might be an option to register both paths for a while and then migrate
302 # over to the new location.
303 # over to the new location.
303
304
304 # Serving static files with a catchall.
305 # Serving static files with a catchall.
305 if settings['static_files']:
306 if settings['static_files']:
306 config.add_route('catchall_static', '/*subpath')
307 config.add_route('catchall_static', '/*subpath')
307 config.add_view(
308 config.add_view(
308 static_view('rhodecode:public'), route_name='catchall_static')
309 static_view('rhodecode:public'), route_name='catchall_static')
309
310
310
311
311 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
312 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
312 """
313 """
313 Apply outer WSGI middlewares around the application.
314 Apply outer WSGI middlewares around the application.
314
315
315 Part of this has been moved up from the Pylons layer, so that the
316 Part of this has been moved up from the Pylons layer, so that the
316 data is also available if old Pylons code is hit through an already ported
317 data is also available if old Pylons code is hit through an already ported
317 view.
318 view.
318 """
319 """
319 settings = config.registry.settings
320 settings = config.registry.settings
320
321
321 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
322 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
322 pyramid_app = HttpsFixup(pyramid_app, settings)
323 pyramid_app = HttpsFixup(pyramid_app, settings)
323
324
324 # Add RoutesMiddleware to support the pylons compatibility tween during
325 # Add RoutesMiddleware to support the pylons compatibility tween during
325
326
326 # migration to pyramid.
327 # migration to pyramid.
327 pyramid_app = RoutesMiddleware(
328 pyramid_app = RoutesMiddleware(
328 pyramid_app, config.registry._pylons_compat_config['routes.map'])
329 pyramid_app, config.registry._pylons_compat_config['routes.map'])
329
330
330 if asbool(settings.get('appenlight', 'false')):
331 if asbool(settings.get('appenlight', 'false')):
331 pyramid_app, _ = wrap_in_appenlight_if_enabled(
332 pyramid_app, _ = wrap_in_appenlight_if_enabled(
332 pyramid_app, config.registry._pylons_compat_config)
333 pyramid_app, config.registry._pylons_compat_config)
333
334
334 # TODO: johbo: Don't really see why we enable the gzip middleware when
335 # TODO: johbo: Don't really see why we enable the gzip middleware when
335 # serving static files, might be something that should have its own setting
336 # serving static files, might be something that should have its own setting
336 # as well?
337 # as well?
337 if settings['static_files']:
338 if settings['static_files']:
338 pyramid_app = make_gzip_middleware(
339 pyramid_app = make_gzip_middleware(
339 pyramid_app, settings, compress_level=1)
340 pyramid_app, settings, compress_level=1)
340
341
341 return pyramid_app
342 return pyramid_app
342
343
343
344
344 def sanitize_settings_and_apply_defaults(settings):
345 def sanitize_settings_and_apply_defaults(settings):
345 """
346 """
346 Applies settings defaults and does all type conversion.
347 Applies settings defaults and does all type conversion.
347
348
348 We would move all settings parsing and preparation into this place, so that
349 We would move all settings parsing and preparation into this place, so that
349 we have only one place left which deals with this part. The remaining parts
350 we have only one place left which deals with this part. The remaining parts
350 of the application would start to rely fully on well prepared settings.
351 of the application would start to rely fully on well prepared settings.
351
352
352 This piece would later be split up per topic to avoid a big fat monster
353 This piece would later be split up per topic to avoid a big fat monster
353 function.
354 function.
354 """
355 """
355
356
356 # Pyramid's mako renderer has to search in the templates folder so that the
357 # Pyramid's mako renderer has to search in the templates folder so that the
357 # old templates still work. Ported and new templates are expected to use
358 # old templates still work. Ported and new templates are expected to use
358 # real asset specifications for the includes.
359 # real asset specifications for the includes.
359 mako_directories = settings.setdefault('mako.directories', [
360 mako_directories = settings.setdefault('mako.directories', [
360 # Base templates of the original Pylons application
361 # Base templates of the original Pylons application
361 'rhodecode:templates',
362 'rhodecode:templates',
362 ])
363 ])
363 log.debug(
364 log.debug(
364 "Using the following Mako template directories: %s",
365 "Using the following Mako template directories: %s",
365 mako_directories)
366 mako_directories)
366
367
367 # Default includes, possible to change as a user
368 # Default includes, possible to change as a user
368 pyramid_includes = settings.setdefault('pyramid.includes', [
369 pyramid_includes = settings.setdefault('pyramid.includes', [
369 'rhodecode.lib.middleware.request_wrapper',
370 'rhodecode.lib.middleware.request_wrapper',
370 ])
371 ])
371 log.debug(
372 log.debug(
372 "Using the following pyramid.includes: %s",
373 "Using the following pyramid.includes: %s",
373 pyramid_includes)
374 pyramid_includes)
374
375
375 # TODO: johbo: Re-think this, usually the call to config.include
376 # TODO: johbo: Re-think this, usually the call to config.include
376 # should allow to pass in a prefix.
377 # should allow to pass in a prefix.
377 settings.setdefault('rhodecode.api.url', '/_admin/api')
378 settings.setdefault('rhodecode.api.url', '/_admin/api')
378
379
379 _bool_setting(settings, 'vcs.server.enable', 'true')
380 _bool_setting(settings, 'vcs.server.enable', 'true')
380 _bool_setting(settings, 'static_files', 'true')
381 _bool_setting(settings, 'static_files', 'true')
381 _bool_setting(settings, 'is_test', 'false')
382 _bool_setting(settings, 'is_test', 'false')
382
383
383 return settings
384 return settings
384
385
385
386
386 def _bool_setting(settings, name, default):
387 def _bool_setting(settings, name, default):
387 settings[name] = asbool(settings.get(name, default))
388 settings[name] = asbool(settings.get(name, default))
@@ -1,69 +1,70 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from datetime import datetime
19 from datetime import datetime
20 from marshmallow import Schema, fields
20 from marshmallow import Schema, fields
21 from pyramid.threadlocal import get_current_request
21 from pyramid.threadlocal import get_current_request
22 from rhodecode.lib.utils2 import AttributeDict
22 from rhodecode.lib.utils2 import AttributeDict
23
23
24
24
25 SYSTEM_USER = AttributeDict(dict(
25 SYSTEM_USER = AttributeDict(dict(
26 username='__SYSTEM__'
26 username='__SYSTEM__'
27 ))
27 ))
28
28
29
29
30 class UserSchema(Schema):
30 class UserSchema(Schema):
31 """
31 """
32 Marshmallow schema for a user
32 Marshmallow schema for a user
33 """
33 """
34 username = fields.Str()
34 username = fields.Str()
35
35
36
36
37 class RhodecodeEventSchema(Schema):
37 class RhodecodeEventSchema(Schema):
38 """
38 """
39 Marshmallow schema for a rhodecode event
39 Marshmallow schema for a rhodecode event
40 """
40 """
41 utc_timestamp = fields.DateTime()
41 utc_timestamp = fields.DateTime()
42 acting_user = fields.Nested(UserSchema)
42 actor = fields.Nested(UserSchema)
43 acting_ip = fields.Str()
43 actor_ip = fields.Str()
44 name = fields.Str(attribute='name')
44
45
45
46
46 class RhodecodeEvent(object):
47 class RhodecodeEvent(object):
47 """
48 """
48 Base event class for all Rhodecode events
49 Base event class for all Rhodecode events
49 """
50 """
50 MarshmallowSchema = RhodecodeEventSchema
51 MarshmallowSchema = RhodecodeEventSchema
51
52
52 def __init__(self):
53 def __init__(self):
53 self.request = get_current_request()
54 self.request = get_current_request()
54 self.utc_timestamp = datetime.utcnow()
55 self.utc_timestamp = datetime.utcnow()
55
56
56 @property
57 @property
57 def acting_user(self):
58 def actor(self):
58 if self.request:
59 if self.request:
59 return self.request.user.get_instance()
60 return self.request.user.get_instance()
60 return SYSTEM_USER
61 return SYSTEM_USER
61
62
62 @property
63 @property
63 def acting_ip(self):
64 def actor_ip(self):
64 if self.request:
65 if self.request:
65 return self.request.user.ip_addr
66 return self.request.user.ip_addr
66 return '<no ip available>'
67 return '<no ip available>'
67
68
68 def as_dict(self):
69 def as_dict(self):
69 return self.MarshmallowSchema().dump(self).data No newline at end of file
70 return self.MarshmallowSchema().dump(self).data
@@ -1,149 +1,149 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
19 from marshmallow import Schema, fields
20
20
21 from rhodecode.model.db import Repository, Session
21 from rhodecode.model.db import User, Repository, Session
22 from rhodecode.events.base import RhodecodeEvent
22 from rhodecode.events.base import RhodecodeEvent
23
23
24
24
25 def get_pull_request_url(repo):
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
27 return RepoModel().get_url(repo)
28
28
29
29
30 class RepositorySchema(Schema):
30 class RepositorySchema(Schema):
31 """
31 """
32 Marshmallow schema for a repository
32 Marshmallow schema for a repository
33 """
33 """
34 repo_id = fields.Integer()
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
35 repo_name = fields.Str()
36 url = fields.Function(get_pull_request_url)
36 url = fields.Function(get_repo_url)
37
37
38
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
40 """
41 Marshmallow schema for a repository event
41 Marshmallow schema for a repository event
42 """
42 """
43 repository = fields.Nested(RepositorySchema)
43 repo = fields.Nested(RepositorySchema)
44
44
45
45
46 class RepoEvent(RhodecodeEvent):
46 class RepoEvent(RhodecodeEvent):
47 """
47 """
48 Base class for events acting on a repository.
48 Base class for events acting on a repository.
49
49
50 :param repo: a :class:`Repository` instance
50 :param repo: a :class:`Repository` instance
51 """
51 """
52 MarshmallowSchema = RepoEventSchema
52 MarshmallowSchema = RepoEventSchema
53
53
54 def __init__(self, repo):
54 def __init__(self, repo):
55 super(RepoEvent, self).__init__()
55 super(RepoEvent, self).__init__()
56 self.repo = repo
56 self.repo = repo
57
57
58
58
59 class RepoPreCreateEvent(RepoEvent):
59 class RepoPreCreateEvent(RepoEvent):
60 """
60 """
61 An instance of this class is emitted as an :term:`event` before a repo is
61 An instance of this class is emitted as an :term:`event` before a repo is
62 created.
62 created.
63 """
63 """
64 name = 'repo-pre-create'
64 name = 'repo-pre-create'
65
65
66
66
67 class RepoCreatedEvent(RepoEvent):
67 class RepoCreatedEvent(RepoEvent):
68 """
68 """
69 An instance of this class is emitted as an :term:`event` whenever a repo is
69 An instance of this class is emitted as an :term:`event` whenever a repo is
70 created.
70 created.
71 """
71 """
72 name = 'repo-created'
72 name = 'repo-created'
73
73
74
74
75 class RepoPreDeleteEvent(RepoEvent):
75 class RepoPreDeleteEvent(RepoEvent):
76 """
76 """
77 An instance of this class is emitted as an :term:`event` whenever a repo is
77 An instance of this class is emitted as an :term:`event` whenever a repo is
78 created.
78 created.
79 """
79 """
80 name = 'repo-pre-delete'
80 name = 'repo-pre-delete'
81
81
82
82
83 class RepoDeletedEvent(RepoEvent):
83 class RepoDeletedEvent(RepoEvent):
84 """
84 """
85 An instance of this class is emitted as an :term:`event` whenever a repo is
85 An instance of this class is emitted as an :term:`event` whenever a repo is
86 created.
86 created.
87 """
87 """
88 name = 'repo-deleted'
88 name = 'repo-deleted'
89
89
90
90
91 class RepoVCSEvent(RepoEvent):
91 class RepoVCSEvent(RepoEvent):
92 """
92 """
93 Base class for events triggered by the VCS
93 Base class for events triggered by the VCS
94 """
94 """
95 def __init__(self, repo_name, extras):
95 def __init__(self, repo_name, extras):
96 self.repo = Repository.get_by_repo_name(repo_name)
96 self.repo = Repository.get_by_repo_name(repo_name)
97 if not self.repo:
97 if not self.repo:
98 raise Exception('repo by this name %s does not exist' % repo_name)
98 raise Exception('repo by this name %s does not exist' % repo_name)
99 self.extras = extras
99 self.extras = extras
100 super(RepoVCSEvent, self).__init__(self.repo)
100 super(RepoVCSEvent, self).__init__(self.repo)
101
101
102 @property
102 @property
103 def acting_user(self):
103 def actor(self):
104 if self.extras.get('username'):
104 if self.extras.get('username'):
105 return User.get_by_username(extras['username'])
105 return User.get_by_username(self.extras['username'])
106
106
107 @property
107 @property
108 def acting_ip(self):
108 def actor_ip(self):
109 if self.extras.get('ip'):
109 if self.extras.get('ip'):
110 return User.get_by_username(extras['ip'])
110 return self.extras['ip']
111
111
112
112
113 class RepoPrePullEvent(RepoVCSEvent):
113 class RepoPrePullEvent(RepoVCSEvent):
114 """
114 """
115 An instance of this class is emitted as an :term:`event` before commits
115 An instance of this class is emitted as an :term:`event` before commits
116 are pulled from a repo.
116 are pulled from a repo.
117 """
117 """
118 name = 'repo-pre-pull'
118 name = 'repo-pre-pull'
119
119
120
120
121 class RepoPullEvent(RepoVCSEvent):
121 class RepoPullEvent(RepoVCSEvent):
122 """
122 """
123 An instance of this class is emitted as an :term:`event` after commits
123 An instance of this class is emitted as an :term:`event` after commits
124 are pulled from a repo.
124 are pulled from a repo.
125 """
125 """
126 name = 'repo-pull'
126 name = 'repo-pull'
127
127
128
128
129 class RepoPrePushEvent(RepoVCSEvent):
129 class RepoPrePushEvent(RepoVCSEvent):
130 """
130 """
131 An instance of this class is emitted as an :term:`event` before commits
131 An instance of this class is emitted as an :term:`event` before commits
132 are pushed to a repo.
132 are pushed to a repo.
133 """
133 """
134 name = 'repo-pre-push'
134 name = 'repo-pre-push'
135
135
136
136
137 class RepoPushEvent(RepoVCSEvent):
137 class RepoPushEvent(RepoVCSEvent):
138 """
138 """
139 An instance of this class is emitted as an :term:`event` after commits
139 An instance of this class is emitted as an :term:`event` after commits
140 are pushed to a repo.
140 are pushed to a repo.
141
141
142 :param extras: (optional) dict of data from proxied VCS actions
142 :param extras: (optional) dict of data from proxied VCS actions
143 """
143 """
144 name = 'repo-push'
144 name = 'repo-push'
145
145
146 def __init__(self, repo_name, pushed_commit_ids, extras):
146 def __init__(self, repo_name, pushed_commit_ids, extras):
147 super(RepoPushEvent, self).__init__(repo_name, extras)
147 super(RepoPushEvent, self).__init__(repo_name, extras)
148 self.pushed_commit_ids = pushed_commit_ids
148 self.pushed_commit_ids = pushed_commit_ids
149
149
@@ -1,1153 +1,1153 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26 from collections import namedtuple
26 from collections import namedtuple
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30
30
31 from pylons.i18n.translation import _
31 from pylons.i18n.translation import _
32 from pylons.i18n.translation import lazy_ugettext
32 from pylons.i18n.translation import lazy_ugettext
33
33
34 import rhodecode
34 import rhodecode
35 from rhodecode.lib import helpers as h, hooks_utils, diffs
35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 from rhodecode.lib.compat import OrderedDict
36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 from rhodecode.lib.markup_renderer import (
38 from rhodecode.lib.markup_renderer import (
39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 from rhodecode.lib.utils import action_logger
40 from rhodecode.lib.utils import action_logger
41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 from rhodecode.lib.vcs.backends.base import (
42 from rhodecode.lib.vcs.backends.base import (
43 Reference, MergeResponse, MergeFailureReason)
43 Reference, MergeResponse, MergeFailureReason)
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, EmptyRepositoryError)
45 CommitDoesNotExistError, EmptyRepositoryError)
46 from rhodecode.model import BaseModel
46 from rhodecode.model import BaseModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.comment import ChangesetCommentsModel
48 from rhodecode.model.comment import ChangesetCommentsModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 PullRequestVersion, ChangesetComment)
51 PullRequestVersion, ChangesetComment)
52 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
53 from rhodecode.model.notification import NotificationModel, \
53 from rhodecode.model.notification import NotificationModel, \
54 EmailNotificationModel
54 EmailNotificationModel
55 from rhodecode.model.scm import ScmModel
55 from rhodecode.model.scm import ScmModel
56 from rhodecode.model.settings import VcsSettingsModel
56 from rhodecode.model.settings import VcsSettingsModel
57
57
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
62 class PullRequestModel(BaseModel):
62 class PullRequestModel(BaseModel):
63
63
64 cls = PullRequest
64 cls = PullRequest
65
65
66 DIFF_CONTEXT = 3
66 DIFF_CONTEXT = 3
67
67
68 MERGE_STATUS_MESSAGES = {
68 MERGE_STATUS_MESSAGES = {
69 MergeFailureReason.NONE: lazy_ugettext(
69 MergeFailureReason.NONE: lazy_ugettext(
70 'This pull request can be automatically merged.'),
70 'This pull request can be automatically merged.'),
71 MergeFailureReason.UNKNOWN: lazy_ugettext(
71 MergeFailureReason.UNKNOWN: lazy_ugettext(
72 'This pull request cannot be merged because of an unhandled'
72 'This pull request cannot be merged because of an unhandled'
73 ' exception.'),
73 ' exception.'),
74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
75 'This pull request cannot be merged because of conflicts.'),
75 'This pull request cannot be merged because of conflicts.'),
76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
77 'This pull request could not be merged because push to target'
77 'This pull request could not be merged because push to target'
78 ' failed.'),
78 ' failed.'),
79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
80 'This pull request cannot be merged because the target is not a'
80 'This pull request cannot be merged because the target is not a'
81 ' head.'),
81 ' head.'),
82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
83 'This pull request cannot be merged because the source contains'
83 'This pull request cannot be merged because the source contains'
84 ' more branches than the target.'),
84 ' more branches than the target.'),
85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
86 'This pull request cannot be merged because the target has'
86 'This pull request cannot be merged because the target has'
87 ' multiple heads.'),
87 ' multiple heads.'),
88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
89 'This pull request cannot be merged because the target repository'
89 'This pull request cannot be merged because the target repository'
90 ' is locked.'),
90 ' is locked.'),
91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
92 'This pull request cannot be merged because the target or the '
92 'This pull request cannot be merged because the target or the '
93 'source reference is missing.'),
93 'source reference is missing.'),
94 }
94 }
95
95
96 def __get_pull_request(self, pull_request):
96 def __get_pull_request(self, pull_request):
97 return self._get_instance(PullRequest, pull_request)
97 return self._get_instance(PullRequest, pull_request)
98
98
99 def _check_perms(self, perms, pull_request, user, api=False):
99 def _check_perms(self, perms, pull_request, user, api=False):
100 if not api:
100 if not api:
101 return h.HasRepoPermissionAny(*perms)(
101 return h.HasRepoPermissionAny(*perms)(
102 user=user, repo_name=pull_request.target_repo.repo_name)
102 user=user, repo_name=pull_request.target_repo.repo_name)
103 else:
103 else:
104 return h.HasRepoPermissionAnyApi(*perms)(
104 return h.HasRepoPermissionAnyApi(*perms)(
105 user=user, repo_name=pull_request.target_repo.repo_name)
105 user=user, repo_name=pull_request.target_repo.repo_name)
106
106
107 def check_user_read(self, pull_request, user, api=False):
107 def check_user_read(self, pull_request, user, api=False):
108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 return self._check_perms(_perms, pull_request, user, api)
109 return self._check_perms(_perms, pull_request, user, api)
110
110
111 def check_user_merge(self, pull_request, user, api=False):
111 def check_user_merge(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_update(self, pull_request, user, api=False):
115 def check_user_update(self, pull_request, user, api=False):
116 owner = user.user_id == pull_request.user_id
116 owner = user.user_id == pull_request.user_id
117 return self.check_user_merge(pull_request, user, api) or owner
117 return self.check_user_merge(pull_request, user, api) or owner
118
118
119 def check_user_change_status(self, pull_request, user, api=False):
119 def check_user_change_status(self, pull_request, user, api=False):
120 reviewer = user.user_id in [x.user_id for x in
120 reviewer = user.user_id in [x.user_id for x in
121 pull_request.reviewers]
121 pull_request.reviewers]
122 return self.check_user_update(pull_request, user, api) or reviewer
122 return self.check_user_update(pull_request, user, api) or reviewer
123
123
124 def get(self, pull_request):
124 def get(self, pull_request):
125 return self.__get_pull_request(pull_request)
125 return self.__get_pull_request(pull_request)
126
126
127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 opened_by=None, order_by=None,
128 opened_by=None, order_by=None,
129 order_dir='desc'):
129 order_dir='desc'):
130 repo = self._get_repo(repo_name)
130 repo = self._get_repo(repo_name)
131 q = PullRequest.query()
131 q = PullRequest.query()
132 # source or target
132 # source or target
133 if source:
133 if source:
134 q = q.filter(PullRequest.source_repo == repo)
134 q = q.filter(PullRequest.source_repo == repo)
135 else:
135 else:
136 q = q.filter(PullRequest.target_repo == repo)
136 q = q.filter(PullRequest.target_repo == repo)
137
137
138 # closed,opened
138 # closed,opened
139 if statuses:
139 if statuses:
140 q = q.filter(PullRequest.status.in_(statuses))
140 q = q.filter(PullRequest.status.in_(statuses))
141
141
142 # opened by filter
142 # opened by filter
143 if opened_by:
143 if opened_by:
144 q = q.filter(PullRequest.user_id.in_(opened_by))
144 q = q.filter(PullRequest.user_id.in_(opened_by))
145
145
146 if order_by:
146 if order_by:
147 order_map = {
147 order_map = {
148 'name_raw': PullRequest.pull_request_id,
148 'name_raw': PullRequest.pull_request_id,
149 'title': PullRequest.title,
149 'title': PullRequest.title,
150 'updated_on_raw': PullRequest.updated_on
150 'updated_on_raw': PullRequest.updated_on
151 }
151 }
152 if order_dir == 'asc':
152 if order_dir == 'asc':
153 q = q.order_by(order_map[order_by].asc())
153 q = q.order_by(order_map[order_by].asc())
154 else:
154 else:
155 q = q.order_by(order_map[order_by].desc())
155 q = q.order_by(order_map[order_by].desc())
156
156
157 return q
157 return q
158
158
159 def count_all(self, repo_name, source=False, statuses=None,
159 def count_all(self, repo_name, source=False, statuses=None,
160 opened_by=None):
160 opened_by=None):
161 """
161 """
162 Count the number of pull requests for a specific repository.
162 Count the number of pull requests for a specific repository.
163
163
164 :param repo_name: target or source repo
164 :param repo_name: target or source repo
165 :param source: boolean flag to specify if repo_name refers to source
165 :param source: boolean flag to specify if repo_name refers to source
166 :param statuses: list of pull request statuses
166 :param statuses: list of pull request statuses
167 :param opened_by: author user of the pull request
167 :param opened_by: author user of the pull request
168 :returns: int number of pull requests
168 :returns: int number of pull requests
169 """
169 """
170 q = self._prepare_get_all_query(
170 q = self._prepare_get_all_query(
171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172
172
173 return q.count()
173 return q.count()
174
174
175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 offset=0, length=None, order_by=None, order_dir='desc'):
176 offset=0, length=None, order_by=None, order_dir='desc'):
177 """
177 """
178 Get all pull requests for a specific repository.
178 Get all pull requests for a specific repository.
179
179
180 :param repo_name: target or source repo
180 :param repo_name: target or source repo
181 :param source: boolean flag to specify if repo_name refers to source
181 :param source: boolean flag to specify if repo_name refers to source
182 :param statuses: list of pull request statuses
182 :param statuses: list of pull request statuses
183 :param opened_by: author user of the pull request
183 :param opened_by: author user of the pull request
184 :param offset: pagination offset
184 :param offset: pagination offset
185 :param length: length of returned list
185 :param length: length of returned list
186 :param order_by: order of the returned list
186 :param order_by: order of the returned list
187 :param order_dir: 'asc' or 'desc' ordering direction
187 :param order_dir: 'asc' or 'desc' ordering direction
188 :returns: list of pull requests
188 :returns: list of pull requests
189 """
189 """
190 q = self._prepare_get_all_query(
190 q = self._prepare_get_all_query(
191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 order_by=order_by, order_dir=order_dir)
192 order_by=order_by, order_dir=order_dir)
193
193
194 if length:
194 if length:
195 pull_requests = q.limit(length).offset(offset).all()
195 pull_requests = q.limit(length).offset(offset).all()
196 else:
196 else:
197 pull_requests = q.all()
197 pull_requests = q.all()
198
198
199 return pull_requests
199 return pull_requests
200
200
201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 opened_by=None):
202 opened_by=None):
203 """
203 """
204 Count the number of pull requests for a specific repository that are
204 Count the number of pull requests for a specific repository that are
205 awaiting review.
205 awaiting review.
206
206
207 :param repo_name: target or source repo
207 :param repo_name: target or source repo
208 :param source: boolean flag to specify if repo_name refers to source
208 :param source: boolean flag to specify if repo_name refers to source
209 :param statuses: list of pull request statuses
209 :param statuses: list of pull request statuses
210 :param opened_by: author user of the pull request
210 :param opened_by: author user of the pull request
211 :returns: int number of pull requests
211 :returns: int number of pull requests
212 """
212 """
213 pull_requests = self.get_awaiting_review(
213 pull_requests = self.get_awaiting_review(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215
215
216 return len(pull_requests)
216 return len(pull_requests)
217
217
218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 opened_by=None, offset=0, length=None,
219 opened_by=None, offset=0, length=None,
220 order_by=None, order_dir='desc'):
220 order_by=None, order_dir='desc'):
221 """
221 """
222 Get all pull requests for a specific repository that are awaiting
222 Get all pull requests for a specific repository that are awaiting
223 review.
223 review.
224
224
225 :param repo_name: target or source repo
225 :param repo_name: target or source repo
226 :param source: boolean flag to specify if repo_name refers to source
226 :param source: boolean flag to specify if repo_name refers to source
227 :param statuses: list of pull request statuses
227 :param statuses: list of pull request statuses
228 :param opened_by: author user of the pull request
228 :param opened_by: author user of the pull request
229 :param offset: pagination offset
229 :param offset: pagination offset
230 :param length: length of returned list
230 :param length: length of returned list
231 :param order_by: order of the returned list
231 :param order_by: order of the returned list
232 :param order_dir: 'asc' or 'desc' ordering direction
232 :param order_dir: 'asc' or 'desc' ordering direction
233 :returns: list of pull requests
233 :returns: list of pull requests
234 """
234 """
235 pull_requests = self.get_all(
235 pull_requests = self.get_all(
236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 order_by=order_by, order_dir=order_dir)
237 order_by=order_by, order_dir=order_dir)
238
238
239 _filtered_pull_requests = []
239 _filtered_pull_requests = []
240 for pr in pull_requests:
240 for pr in pull_requests:
241 status = pr.calculated_review_status()
241 status = pr.calculated_review_status()
242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 ChangesetStatus.STATUS_UNDER_REVIEW]:
243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 _filtered_pull_requests.append(pr)
244 _filtered_pull_requests.append(pr)
245 if length:
245 if length:
246 return _filtered_pull_requests[offset:offset+length]
246 return _filtered_pull_requests[offset:offset+length]
247 else:
247 else:
248 return _filtered_pull_requests
248 return _filtered_pull_requests
249
249
250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 opened_by=None, user_id=None):
251 opened_by=None, user_id=None):
252 """
252 """
253 Count the number of pull requests for a specific repository that are
253 Count the number of pull requests for a specific repository that are
254 awaiting review from a specific user.
254 awaiting review from a specific user.
255
255
256 :param repo_name: target or source repo
256 :param repo_name: target or source repo
257 :param source: boolean flag to specify if repo_name refers to source
257 :param source: boolean flag to specify if repo_name refers to source
258 :param statuses: list of pull request statuses
258 :param statuses: list of pull request statuses
259 :param opened_by: author user of the pull request
259 :param opened_by: author user of the pull request
260 :param user_id: reviewer user of the pull request
260 :param user_id: reviewer user of the pull request
261 :returns: int number of pull requests
261 :returns: int number of pull requests
262 """
262 """
263 pull_requests = self.get_awaiting_my_review(
263 pull_requests = self.get_awaiting_my_review(
264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 user_id=user_id)
265 user_id=user_id)
266
266
267 return len(pull_requests)
267 return len(pull_requests)
268
268
269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 opened_by=None, user_id=None, offset=0,
270 opened_by=None, user_id=None, offset=0,
271 length=None, order_by=None, order_dir='desc'):
271 length=None, order_by=None, order_dir='desc'):
272 """
272 """
273 Get all pull requests for a specific repository that are awaiting
273 Get all pull requests for a specific repository that are awaiting
274 review from a specific user.
274 review from a specific user.
275
275
276 :param repo_name: target or source repo
276 :param repo_name: target or source repo
277 :param source: boolean flag to specify if repo_name refers to source
277 :param source: boolean flag to specify if repo_name refers to source
278 :param statuses: list of pull request statuses
278 :param statuses: list of pull request statuses
279 :param opened_by: author user of the pull request
279 :param opened_by: author user of the pull request
280 :param user_id: reviewer user of the pull request
280 :param user_id: reviewer user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _my = PullRequestModel().get_not_reviewed(user_id)
291 _my = PullRequestModel().get_not_reviewed(user_id)
292 my_participation = []
292 my_participation = []
293 for pr in pull_requests:
293 for pr in pull_requests:
294 if pr in _my:
294 if pr in _my:
295 my_participation.append(pr)
295 my_participation.append(pr)
296 _filtered_pull_requests = my_participation
296 _filtered_pull_requests = my_participation
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def get_not_reviewed(self, user_id):
302 def get_not_reviewed(self, user_id):
303 return [
303 return [
304 x.pull_request for x in PullRequestReviewers.query().filter(
304 x.pull_request for x in PullRequestReviewers.query().filter(
305 PullRequestReviewers.user_id == user_id).all()
305 PullRequestReviewers.user_id == user_id).all()
306 ]
306 ]
307
307
308 def get_versions(self, pull_request):
308 def get_versions(self, pull_request):
309 """
309 """
310 returns version of pull request sorted by ID descending
310 returns version of pull request sorted by ID descending
311 """
311 """
312 return PullRequestVersion.query()\
312 return PullRequestVersion.query()\
313 .filter(PullRequestVersion.pull_request == pull_request)\
313 .filter(PullRequestVersion.pull_request == pull_request)\
314 .order_by(PullRequestVersion.pull_request_version_id.asc())\
314 .order_by(PullRequestVersion.pull_request_version_id.asc())\
315 .all()
315 .all()
316
316
317 def create(self, created_by, source_repo, source_ref, target_repo,
317 def create(self, created_by, source_repo, source_ref, target_repo,
318 target_ref, revisions, reviewers, title, description=None):
318 target_ref, revisions, reviewers, title, description=None):
319 created_by_user = self._get_user(created_by)
319 created_by_user = self._get_user(created_by)
320 source_repo = self._get_repo(source_repo)
320 source_repo = self._get_repo(source_repo)
321 target_repo = self._get_repo(target_repo)
321 target_repo = self._get_repo(target_repo)
322
322
323 pull_request = PullRequest()
323 pull_request = PullRequest()
324 pull_request.source_repo = source_repo
324 pull_request.source_repo = source_repo
325 pull_request.source_ref = source_ref
325 pull_request.source_ref = source_ref
326 pull_request.target_repo = target_repo
326 pull_request.target_repo = target_repo
327 pull_request.target_ref = target_ref
327 pull_request.target_ref = target_ref
328 pull_request.revisions = revisions
328 pull_request.revisions = revisions
329 pull_request.title = title
329 pull_request.title = title
330 pull_request.description = description
330 pull_request.description = description
331 pull_request.author = created_by_user
331 pull_request.author = created_by_user
332
332
333 Session().add(pull_request)
333 Session().add(pull_request)
334 Session().flush()
334 Session().flush()
335
335
336 # members / reviewers
336 # members / reviewers
337 for user_id in set(reviewers):
337 for user_id in set(reviewers):
338 user = self._get_user(user_id)
338 user = self._get_user(user_id)
339 reviewer = PullRequestReviewers(user, pull_request)
339 reviewer = PullRequestReviewers(user, pull_request)
340 Session().add(reviewer)
340 Session().add(reviewer)
341
341
342 # Set approval status to "Under Review" for all commits which are
342 # Set approval status to "Under Review" for all commits which are
343 # part of this pull request.
343 # part of this pull request.
344 ChangesetStatusModel().set_status(
344 ChangesetStatusModel().set_status(
345 repo=target_repo,
345 repo=target_repo,
346 status=ChangesetStatus.STATUS_UNDER_REVIEW,
346 status=ChangesetStatus.STATUS_UNDER_REVIEW,
347 user=created_by_user,
347 user=created_by_user,
348 pull_request=pull_request
348 pull_request=pull_request
349 )
349 )
350
350
351 self.notify_reviewers(pull_request, reviewers)
351 self.notify_reviewers(pull_request, reviewers)
352 self._trigger_pull_request_hook(
352 self._trigger_pull_request_hook(
353 pull_request, created_by_user, 'create')
353 pull_request, created_by_user, 'create')
354
354
355 return pull_request
355 return pull_request
356
356
357 def _trigger_pull_request_hook(self, pull_request, user, action):
357 def _trigger_pull_request_hook(self, pull_request, user, action):
358 pull_request = self.__get_pull_request(pull_request)
358 pull_request = self.__get_pull_request(pull_request)
359 target_scm = pull_request.target_repo.scm_instance()
359 target_scm = pull_request.target_repo.scm_instance()
360 if action == 'create':
360 if action == 'create':
361 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
361 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
362 elif action == 'merge':
362 elif action == 'merge':
363 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
363 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
364 elif action == 'close':
364 elif action == 'close':
365 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
365 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
366 elif action == 'review_status_change':
366 elif action == 'review_status_change':
367 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
367 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
368 elif action == 'update':
368 elif action == 'update':
369 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
369 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
370 else:
370 else:
371 return
371 return
372
372
373 trigger_hook(
373 trigger_hook(
374 username=user.username,
374 username=user.username,
375 repo_name=pull_request.target_repo.repo_name,
375 repo_name=pull_request.target_repo.repo_name,
376 repo_alias=target_scm.alias,
376 repo_alias=target_scm.alias,
377 pull_request=pull_request)
377 pull_request=pull_request)
378
378
379 def _get_commit_ids(self, pull_request):
379 def _get_commit_ids(self, pull_request):
380 """
380 """
381 Return the commit ids of the merged pull request.
381 Return the commit ids of the merged pull request.
382
382
383 This method is not dealing correctly yet with the lack of autoupdates
383 This method is not dealing correctly yet with the lack of autoupdates
384 nor with the implicit target updates.
384 nor with the implicit target updates.
385 For example: if a commit in the source repo is already in the target it
385 For example: if a commit in the source repo is already in the target it
386 will be reported anyways.
386 will be reported anyways.
387 """
387 """
388 merge_rev = pull_request.merge_rev
388 merge_rev = pull_request.merge_rev
389 if merge_rev is None:
389 if merge_rev is None:
390 raise ValueError('This pull request was not merged yet')
390 raise ValueError('This pull request was not merged yet')
391
391
392 commit_ids = list(pull_request.revisions)
392 commit_ids = list(pull_request.revisions)
393 if merge_rev not in commit_ids:
393 if merge_rev not in commit_ids:
394 commit_ids.append(merge_rev)
394 commit_ids.append(merge_rev)
395
395
396 return commit_ids
396 return commit_ids
397
397
398 def merge(self, pull_request, user, extras):
398 def merge(self, pull_request, user, extras):
399 log.debug("Merging pull request %s", pull_request.pull_request_id)
399 log.debug("Merging pull request %s", pull_request.pull_request_id)
400 merge_state = self._merge_pull_request(pull_request, user, extras)
400 merge_state = self._merge_pull_request(pull_request, user, extras)
401 if merge_state.executed:
401 if merge_state.executed:
402 log.debug(
402 log.debug(
403 "Merge was successful, updating the pull request comments.")
403 "Merge was successful, updating the pull request comments.")
404 self._comment_and_close_pr(pull_request, user, merge_state)
404 self._comment_and_close_pr(pull_request, user, merge_state)
405 self._log_action('user_merged_pull_request', user, pull_request)
405 self._log_action('user_merged_pull_request', user, pull_request)
406 else:
406 else:
407 log.warn("Merge failed, not updating the pull request.")
407 log.warn("Merge failed, not updating the pull request.")
408 return merge_state
408 return merge_state
409
409
410 def _merge_pull_request(self, pull_request, user, extras):
410 def _merge_pull_request(self, pull_request, user, extras):
411 target_vcs = pull_request.target_repo.scm_instance()
411 target_vcs = pull_request.target_repo.scm_instance()
412 source_vcs = pull_request.source_repo.scm_instance()
412 source_vcs = pull_request.source_repo.scm_instance()
413 target_ref = self._refresh_reference(
413 target_ref = self._refresh_reference(
414 pull_request.target_ref_parts, target_vcs)
414 pull_request.target_ref_parts, target_vcs)
415
415
416 message = _(
416 message = _(
417 'Merge pull request #%(pr_id)s from '
417 'Merge pull request #%(pr_id)s from '
418 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
418 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
419 'pr_id': pull_request.pull_request_id,
419 'pr_id': pull_request.pull_request_id,
420 'source_repo': source_vcs.name,
420 'source_repo': source_vcs.name,
421 'source_ref_name': pull_request.source_ref_parts.name,
421 'source_ref_name': pull_request.source_ref_parts.name,
422 'pr_title': pull_request.title
422 'pr_title': pull_request.title
423 }
423 }
424
424
425 workspace_id = self._workspace_id(pull_request)
425 workspace_id = self._workspace_id(pull_request)
426 protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
426 protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
427 use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
427 use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
428 use_rebase = self._use_rebase_for_merging(pull_request)
428 use_rebase = self._use_rebase_for_merging(pull_request)
429
429
430 callback_daemon, extras = prepare_callback_daemon(
430 callback_daemon, extras = prepare_callback_daemon(
431 extras, protocol=protocol, use_direct_calls=use_direct_calls)
431 extras, protocol=protocol, use_direct_calls=use_direct_calls)
432
432
433 with callback_daemon:
433 with callback_daemon:
434 # TODO: johbo: Implement a clean way to run a config_override
434 # TODO: johbo: Implement a clean way to run a config_override
435 # for a single call.
435 # for a single call.
436 target_vcs.config.set(
436 target_vcs.config.set(
437 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
437 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
438 merge_state = target_vcs.merge(
438 merge_state = target_vcs.merge(
439 target_ref, source_vcs, pull_request.source_ref_parts,
439 target_ref, source_vcs, pull_request.source_ref_parts,
440 workspace_id, user_name=user.username,
440 workspace_id, user_name=user.username,
441 user_email=user.email, message=message, use_rebase=use_rebase)
441 user_email=user.email, message=message, use_rebase=use_rebase)
442 return merge_state
442 return merge_state
443
443
444 def _comment_and_close_pr(self, pull_request, user, merge_state):
444 def _comment_and_close_pr(self, pull_request, user, merge_state):
445 pull_request.merge_rev = merge_state.merge_commit_id
445 pull_request.merge_rev = merge_state.merge_commit_id
446 pull_request.updated_on = datetime.datetime.now()
446 pull_request.updated_on = datetime.datetime.now()
447
447
448 ChangesetCommentsModel().create(
448 ChangesetCommentsModel().create(
449 text=unicode(_('Pull request merged and closed')),
449 text=unicode(_('Pull request merged and closed')),
450 repo=pull_request.target_repo.repo_id,
450 repo=pull_request.target_repo.repo_id,
451 user=user.user_id,
451 user=user.user_id,
452 pull_request=pull_request.pull_request_id,
452 pull_request=pull_request.pull_request_id,
453 f_path=None,
453 f_path=None,
454 line_no=None,
454 line_no=None,
455 closing_pr=True
455 closing_pr=True
456 )
456 )
457
457
458 Session().add(pull_request)
458 Session().add(pull_request)
459 Session().flush()
459 Session().flush()
460 # TODO: paris: replace invalidation with less radical solution
460 # TODO: paris: replace invalidation with less radical solution
461 ScmModel().mark_for_invalidation(
461 ScmModel().mark_for_invalidation(
462 pull_request.target_repo.repo_name)
462 pull_request.target_repo.repo_name)
463 self._trigger_pull_request_hook(pull_request, user, 'merge')
463 self._trigger_pull_request_hook(pull_request, user, 'merge')
464
464
465 def has_valid_update_type(self, pull_request):
465 def has_valid_update_type(self, pull_request):
466 source_ref_type = pull_request.source_ref_parts.type
466 source_ref_type = pull_request.source_ref_parts.type
467 return source_ref_type in ['book', 'branch', 'tag']
467 return source_ref_type in ['book', 'branch', 'tag']
468
468
469 def update_commits(self, pull_request):
469 def update_commits(self, pull_request):
470 """
470 """
471 Get the updated list of commits for the pull request
471 Get the updated list of commits for the pull request
472 and return the new pull request version and the list
472 and return the new pull request version and the list
473 of commits processed by this update action
473 of commits processed by this update action
474 """
474 """
475
475
476 pull_request = self.__get_pull_request(pull_request)
476 pull_request = self.__get_pull_request(pull_request)
477 source_ref_type = pull_request.source_ref_parts.type
477 source_ref_type = pull_request.source_ref_parts.type
478 source_ref_name = pull_request.source_ref_parts.name
478 source_ref_name = pull_request.source_ref_parts.name
479 source_ref_id = pull_request.source_ref_parts.commit_id
479 source_ref_id = pull_request.source_ref_parts.commit_id
480
480
481 if not self.has_valid_update_type(pull_request):
481 if not self.has_valid_update_type(pull_request):
482 log.debug(
482 log.debug(
483 "Skipping update of pull request %s due to ref type: %s",
483 "Skipping update of pull request %s due to ref type: %s",
484 pull_request, source_ref_type)
484 pull_request, source_ref_type)
485 return (None, None)
485 return (None, None)
486
486
487 source_repo = pull_request.source_repo.scm_instance()
487 source_repo = pull_request.source_repo.scm_instance()
488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
489 if source_ref_id == source_commit.raw_id:
489 if source_ref_id == source_commit.raw_id:
490 log.debug("Nothing changed in pull request %s", pull_request)
490 log.debug("Nothing changed in pull request %s", pull_request)
491 return (None, None)
491 return (None, None)
492
492
493 # Finally there is a need for an update
493 # Finally there is a need for an update
494 pull_request_version = self._create_version_from_snapshot(pull_request)
494 pull_request_version = self._create_version_from_snapshot(pull_request)
495 self._link_comments_to_version(pull_request_version)
495 self._link_comments_to_version(pull_request_version)
496
496
497 target_ref_type = pull_request.target_ref_parts.type
497 target_ref_type = pull_request.target_ref_parts.type
498 target_ref_name = pull_request.target_ref_parts.name
498 target_ref_name = pull_request.target_ref_parts.name
499 target_ref_id = pull_request.target_ref_parts.commit_id
499 target_ref_id = pull_request.target_ref_parts.commit_id
500 target_repo = pull_request.target_repo.scm_instance()
500 target_repo = pull_request.target_repo.scm_instance()
501
501
502 if target_ref_type in ('tag', 'branch', 'book'):
502 if target_ref_type in ('tag', 'branch', 'book'):
503 target_commit = target_repo.get_commit(target_ref_name)
503 target_commit = target_repo.get_commit(target_ref_name)
504 else:
504 else:
505 target_commit = target_repo.get_commit(target_ref_id)
505 target_commit = target_repo.get_commit(target_ref_id)
506
506
507 # re-compute commit ids
507 # re-compute commit ids
508 old_commit_ids = set(pull_request.revisions)
508 old_commit_ids = set(pull_request.revisions)
509 pre_load = ["author", "branch", "date", "message"]
509 pre_load = ["author", "branch", "date", "message"]
510 commit_ranges = target_repo.compare(
510 commit_ranges = target_repo.compare(
511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
512 pre_load=pre_load)
512 pre_load=pre_load)
513
513
514 ancestor = target_repo.get_common_ancestor(
514 ancestor = target_repo.get_common_ancestor(
515 target_commit.raw_id, source_commit.raw_id, source_repo)
515 target_commit.raw_id, source_commit.raw_id, source_repo)
516
516
517 pull_request.source_ref = '%s:%s:%s' % (
517 pull_request.source_ref = '%s:%s:%s' % (
518 source_ref_type, source_ref_name, source_commit.raw_id)
518 source_ref_type, source_ref_name, source_commit.raw_id)
519 pull_request.target_ref = '%s:%s:%s' % (
519 pull_request.target_ref = '%s:%s:%s' % (
520 target_ref_type, target_ref_name, ancestor)
520 target_ref_type, target_ref_name, ancestor)
521 pull_request.revisions = [
521 pull_request.revisions = [
522 commit.raw_id for commit in reversed(commit_ranges)]
522 commit.raw_id for commit in reversed(commit_ranges)]
523 pull_request.updated_on = datetime.datetime.now()
523 pull_request.updated_on = datetime.datetime.now()
524 Session().add(pull_request)
524 Session().add(pull_request)
525 new_commit_ids = set(pull_request.revisions)
525 new_commit_ids = set(pull_request.revisions)
526
526
527 changes = self._calculate_commit_id_changes(
527 changes = self._calculate_commit_id_changes(
528 old_commit_ids, new_commit_ids)
528 old_commit_ids, new_commit_ids)
529
529
530 old_diff_data, new_diff_data = self._generate_update_diffs(
530 old_diff_data, new_diff_data = self._generate_update_diffs(
531 pull_request, pull_request_version)
531 pull_request, pull_request_version)
532
532
533 ChangesetCommentsModel().outdate_comments(
533 ChangesetCommentsModel().outdate_comments(
534 pull_request, old_diff_data=old_diff_data,
534 pull_request, old_diff_data=old_diff_data,
535 new_diff_data=new_diff_data)
535 new_diff_data=new_diff_data)
536
536
537 file_changes = self._calculate_file_changes(
537 file_changes = self._calculate_file_changes(
538 old_diff_data, new_diff_data)
538 old_diff_data, new_diff_data)
539
539
540 # Add an automatic comment to the pull request
540 # Add an automatic comment to the pull request
541 update_comment = ChangesetCommentsModel().create(
541 update_comment = ChangesetCommentsModel().create(
542 text=self._render_update_message(changes, file_changes),
542 text=self._render_update_message(changes, file_changes),
543 repo=pull_request.target_repo,
543 repo=pull_request.target_repo,
544 user=pull_request.author,
544 user=pull_request.author,
545 pull_request=pull_request,
545 pull_request=pull_request,
546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
547
547
548 # Update status to "Under Review" for added commits
548 # Update status to "Under Review" for added commits
549 for commit_id in changes.added:
549 for commit_id in changes.added:
550 ChangesetStatusModel().set_status(
550 ChangesetStatusModel().set_status(
551 repo=pull_request.source_repo,
551 repo=pull_request.source_repo,
552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
553 comment=update_comment,
553 comment=update_comment,
554 user=pull_request.author,
554 user=pull_request.author,
555 pull_request=pull_request,
555 pull_request=pull_request,
556 revision=commit_id)
556 revision=commit_id)
557
557
558 log.debug(
558 log.debug(
559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
560 'removed_ids: %s', pull_request.pull_request_id,
560 'removed_ids: %s', pull_request.pull_request_id,
561 changes.added, changes.common, changes.removed)
561 changes.added, changes.common, changes.removed)
562 log.debug('Updated pull request with the following file changes: %s',
562 log.debug('Updated pull request with the following file changes: %s',
563 file_changes)
563 file_changes)
564
564
565 log.info(
565 log.info(
566 "Updated pull request %s from commit %s to commit %s, "
566 "Updated pull request %s from commit %s to commit %s, "
567 "stored new version %s of this pull request.",
567 "stored new version %s of this pull request.",
568 pull_request.pull_request_id, source_ref_id,
568 pull_request.pull_request_id, source_ref_id,
569 pull_request.source_ref_parts.commit_id,
569 pull_request.source_ref_parts.commit_id,
570 pull_request_version.pull_request_version_id)
570 pull_request_version.pull_request_version_id)
571 Session().commit()
571 Session().commit()
572 self._trigger_pull_request_hook(pull_request, pull_request.author,
572 self._trigger_pull_request_hook(pull_request, pull_request.author,
573 'update')
573 'update')
574 return (pull_request_version, changes)
574 return (pull_request_version, changes)
575
575
576 def _create_version_from_snapshot(self, pull_request):
576 def _create_version_from_snapshot(self, pull_request):
577 version = PullRequestVersion()
577 version = PullRequestVersion()
578 version.title = pull_request.title
578 version.title = pull_request.title
579 version.description = pull_request.description
579 version.description = pull_request.description
580 version.status = pull_request.status
580 version.status = pull_request.status
581 version.created_on = pull_request.created_on
581 version.created_on = pull_request.created_on
582 version.updated_on = pull_request.updated_on
582 version.updated_on = pull_request.updated_on
583 version.user_id = pull_request.user_id
583 version.user_id = pull_request.user_id
584 version.source_repo = pull_request.source_repo
584 version.source_repo = pull_request.source_repo
585 version.source_ref = pull_request.source_ref
585 version.source_ref = pull_request.source_ref
586 version.target_repo = pull_request.target_repo
586 version.target_repo = pull_request.target_repo
587 version.target_ref = pull_request.target_ref
587 version.target_ref = pull_request.target_ref
588
588
589 version._last_merge_source_rev = pull_request._last_merge_source_rev
589 version._last_merge_source_rev = pull_request._last_merge_source_rev
590 version._last_merge_target_rev = pull_request._last_merge_target_rev
590 version._last_merge_target_rev = pull_request._last_merge_target_rev
591 version._last_merge_status = pull_request._last_merge_status
591 version._last_merge_status = pull_request._last_merge_status
592 version.merge_rev = pull_request.merge_rev
592 version.merge_rev = pull_request.merge_rev
593
593
594 version.revisions = pull_request.revisions
594 version.revisions = pull_request.revisions
595 version.pull_request = pull_request
595 version.pull_request = pull_request
596 Session().add(version)
596 Session().add(version)
597 Session().flush()
597 Session().flush()
598
598
599 return version
599 return version
600
600
601 def _generate_update_diffs(self, pull_request, pull_request_version):
601 def _generate_update_diffs(self, pull_request, pull_request_version):
602 diff_context = (
602 diff_context = (
603 self.DIFF_CONTEXT +
603 self.DIFF_CONTEXT +
604 ChangesetCommentsModel.needed_extra_diff_context())
604 ChangesetCommentsModel.needed_extra_diff_context())
605 old_diff = self._get_diff_from_pr_or_version(
605 old_diff = self._get_diff_from_pr_or_version(
606 pull_request_version, context=diff_context)
606 pull_request_version, context=diff_context)
607 new_diff = self._get_diff_from_pr_or_version(
607 new_diff = self._get_diff_from_pr_or_version(
608 pull_request, context=diff_context)
608 pull_request, context=diff_context)
609
609
610 old_diff_data = diffs.DiffProcessor(old_diff)
610 old_diff_data = diffs.DiffProcessor(old_diff)
611 old_diff_data.prepare()
611 old_diff_data.prepare()
612 new_diff_data = diffs.DiffProcessor(new_diff)
612 new_diff_data = diffs.DiffProcessor(new_diff)
613 new_diff_data.prepare()
613 new_diff_data.prepare()
614
614
615 return old_diff_data, new_diff_data
615 return old_diff_data, new_diff_data
616
616
617 def _link_comments_to_version(self, pull_request_version):
617 def _link_comments_to_version(self, pull_request_version):
618 """
618 """
619 Link all unlinked comments of this pull request to the given version.
619 Link all unlinked comments of this pull request to the given version.
620
620
621 :param pull_request_version: The `PullRequestVersion` to which
621 :param pull_request_version: The `PullRequestVersion` to which
622 the comments shall be linked.
622 the comments shall be linked.
623
623
624 """
624 """
625 pull_request = pull_request_version.pull_request
625 pull_request = pull_request_version.pull_request
626 comments = ChangesetComment.query().filter(
626 comments = ChangesetComment.query().filter(
627 # TODO: johbo: Should we query for the repo at all here?
627 # TODO: johbo: Should we query for the repo at all here?
628 # Pending decision on how comments of PRs are to be related
628 # Pending decision on how comments of PRs are to be related
629 # to either the source repo, the target repo or no repo at all.
629 # to either the source repo, the target repo or no repo at all.
630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
631 ChangesetComment.pull_request == pull_request,
631 ChangesetComment.pull_request == pull_request,
632 ChangesetComment.pull_request_version == None)
632 ChangesetComment.pull_request_version == None)
633
633
634 # TODO: johbo: Find out why this breaks if it is done in a bulk
634 # TODO: johbo: Find out why this breaks if it is done in a bulk
635 # operation.
635 # operation.
636 for comment in comments:
636 for comment in comments:
637 comment.pull_request_version_id = (
637 comment.pull_request_version_id = (
638 pull_request_version.pull_request_version_id)
638 pull_request_version.pull_request_version_id)
639 Session().add(comment)
639 Session().add(comment)
640
640
641 def _calculate_commit_id_changes(self, old_ids, new_ids):
641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 added = new_ids.difference(old_ids)
642 added = new_ids.difference(old_ids)
643 common = old_ids.intersection(new_ids)
643 common = old_ids.intersection(new_ids)
644 removed = old_ids.difference(new_ids)
644 removed = old_ids.difference(new_ids)
645 return ChangeTuple(added, common, removed)
645 return ChangeTuple(added, common, removed)
646
646
647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648
648
649 old_files = OrderedDict()
649 old_files = OrderedDict()
650 for diff_data in old_diff_data.parsed_diff:
650 for diff_data in old_diff_data.parsed_diff:
651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652
652
653 added_files = []
653 added_files = []
654 modified_files = []
654 modified_files = []
655 removed_files = []
655 removed_files = []
656 for diff_data in new_diff_data.parsed_diff:
656 for diff_data in new_diff_data.parsed_diff:
657 new_filename = diff_data['filename']
657 new_filename = diff_data['filename']
658 new_hash = md5_safe(diff_data['raw_diff'])
658 new_hash = md5_safe(diff_data['raw_diff'])
659
659
660 old_hash = old_files.get(new_filename)
660 old_hash = old_files.get(new_filename)
661 if not old_hash:
661 if not old_hash:
662 # file is not present in old diff, means it's added
662 # file is not present in old diff, means it's added
663 added_files.append(new_filename)
663 added_files.append(new_filename)
664 else:
664 else:
665 if new_hash != old_hash:
665 if new_hash != old_hash:
666 modified_files.append(new_filename)
666 modified_files.append(new_filename)
667 # now remove a file from old, since we have seen it already
667 # now remove a file from old, since we have seen it already
668 del old_files[new_filename]
668 del old_files[new_filename]
669
669
670 # removed files is when there are present in old, but not in NEW,
670 # removed files is when there are present in old, but not in NEW,
671 # since we remove old files that are present in new diff, left-overs
671 # since we remove old files that are present in new diff, left-overs
672 # if any should be the removed files
672 # if any should be the removed files
673 removed_files.extend(old_files.keys())
673 removed_files.extend(old_files.keys())
674
674
675 return FileChangeTuple(added_files, modified_files, removed_files)
675 return FileChangeTuple(added_files, modified_files, removed_files)
676
676
677 def _render_update_message(self, changes, file_changes):
677 def _render_update_message(self, changes, file_changes):
678 """
678 """
679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
680 so it's always looking the same disregarding on which default
680 so it's always looking the same disregarding on which default
681 renderer system is using.
681 renderer system is using.
682
682
683 :param changes: changes named tuple
683 :param changes: changes named tuple
684 :param file_changes: file changes named tuple
684 :param file_changes: file changes named tuple
685
685
686 """
686 """
687 new_status = ChangesetStatus.get_status_lbl(
687 new_status = ChangesetStatus.get_status_lbl(
688 ChangesetStatus.STATUS_UNDER_REVIEW)
688 ChangesetStatus.STATUS_UNDER_REVIEW)
689
689
690 changed_files = (
690 changed_files = (
691 file_changes.added + file_changes.modified + file_changes.removed)
691 file_changes.added + file_changes.modified + file_changes.removed)
692
692
693 params = {
693 params = {
694 'under_review_label': new_status,
694 'under_review_label': new_status,
695 'added_commits': changes.added,
695 'added_commits': changes.added,
696 'removed_commits': changes.removed,
696 'removed_commits': changes.removed,
697 'changed_files': changed_files,
697 'changed_files': changed_files,
698 'added_files': file_changes.added,
698 'added_files': file_changes.added,
699 'modified_files': file_changes.modified,
699 'modified_files': file_changes.modified,
700 'removed_files': file_changes.removed,
700 'removed_files': file_changes.removed,
701 }
701 }
702 renderer = RstTemplateRenderer()
702 renderer = RstTemplateRenderer()
703 return renderer.render('pull_request_update.mako', **params)
703 return renderer.render('pull_request_update.mako', **params)
704
704
705 def edit(self, pull_request, title, description):
705 def edit(self, pull_request, title, description):
706 pull_request = self.__get_pull_request(pull_request)
706 pull_request = self.__get_pull_request(pull_request)
707 if pull_request.is_closed():
707 if pull_request.is_closed():
708 raise ValueError('This pull request is closed')
708 raise ValueError('This pull request is closed')
709 if title:
709 if title:
710 pull_request.title = title
710 pull_request.title = title
711 pull_request.description = description
711 pull_request.description = description
712 pull_request.updated_on = datetime.datetime.now()
712 pull_request.updated_on = datetime.datetime.now()
713 Session().add(pull_request)
713 Session().add(pull_request)
714
714
715 def update_reviewers(self, pull_request, reviewers_ids):
715 def update_reviewers(self, pull_request, reviewers_ids):
716 reviewers_ids = set(reviewers_ids)
716 reviewers_ids = set(reviewers_ids)
717 pull_request = self.__get_pull_request(pull_request)
717 pull_request = self.__get_pull_request(pull_request)
718 current_reviewers = PullRequestReviewers.query()\
718 current_reviewers = PullRequestReviewers.query()\
719 .filter(PullRequestReviewers.pull_request ==
719 .filter(PullRequestReviewers.pull_request ==
720 pull_request).all()
720 pull_request).all()
721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
722
722
723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
725
725
726 log.debug("Adding %s reviewers", ids_to_add)
726 log.debug("Adding %s reviewers", ids_to_add)
727 log.debug("Removing %s reviewers", ids_to_remove)
727 log.debug("Removing %s reviewers", ids_to_remove)
728 changed = False
728 changed = False
729 for uid in ids_to_add:
729 for uid in ids_to_add:
730 changed = True
730 changed = True
731 _usr = self._get_user(uid)
731 _usr = self._get_user(uid)
732 reviewer = PullRequestReviewers(_usr, pull_request)
732 reviewer = PullRequestReviewers(_usr, pull_request)
733 Session().add(reviewer)
733 Session().add(reviewer)
734
734
735 self.notify_reviewers(pull_request, ids_to_add)
735 self.notify_reviewers(pull_request, ids_to_add)
736
736
737 for uid in ids_to_remove:
737 for uid in ids_to_remove:
738 changed = True
738 changed = True
739 reviewer = PullRequestReviewers.query()\
739 reviewer = PullRequestReviewers.query()\
740 .filter(PullRequestReviewers.user_id == uid,
740 .filter(PullRequestReviewers.user_id == uid,
741 PullRequestReviewers.pull_request == pull_request)\
741 PullRequestReviewers.pull_request == pull_request)\
742 .scalar()
742 .scalar()
743 if reviewer:
743 if reviewer:
744 Session().delete(reviewer)
744 Session().delete(reviewer)
745 if changed:
745 if changed:
746 pull_request.updated_on = datetime.datetime.now()
746 pull_request.updated_on = datetime.datetime.now()
747 Session().add(pull_request)
747 Session().add(pull_request)
748
748
749 return ids_to_add, ids_to_remove
749 return ids_to_add, ids_to_remove
750
750
751 def get_url(self, pull_request):
751 def get_url(self, pull_request):
752 return url('pullrequest_show', repo_name=self.target_repo.repo_name,
752 return h.url('pullrequest_show', repo_name=self.target_repo.repo_name,
753 pull_request_id=self.pull_request_id,
753 pull_request_id=self.pull_request_id,
754 qualified=True)
754 qualified=True)
755
755
756 def notify_reviewers(self, pull_request, reviewers_ids):
756 def notify_reviewers(self, pull_request, reviewers_ids):
757 # notification to reviewers
757 # notification to reviewers
758 if not reviewers_ids:
758 if not reviewers_ids:
759 return
759 return
760
760
761 pull_request_obj = pull_request
761 pull_request_obj = pull_request
762 # get the current participants of this pull request
762 # get the current participants of this pull request
763 recipients = reviewers_ids
763 recipients = reviewers_ids
764 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
764 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
765
765
766 pr_source_repo = pull_request_obj.source_repo
766 pr_source_repo = pull_request_obj.source_repo
767 pr_target_repo = pull_request_obj.target_repo
767 pr_target_repo = pull_request_obj.target_repo
768
768
769 pr_url = h.url(
769 pr_url = h.url(
770 'pullrequest_show',
770 'pullrequest_show',
771 repo_name=pr_target_repo.repo_name,
771 repo_name=pr_target_repo.repo_name,
772 pull_request_id=pull_request_obj.pull_request_id,
772 pull_request_id=pull_request_obj.pull_request_id,
773 qualified=True,)
773 qualified=True,)
774
774
775 # set some variables for email notification
775 # set some variables for email notification
776 pr_target_repo_url = h.url(
776 pr_target_repo_url = h.url(
777 'summary_home',
777 'summary_home',
778 repo_name=pr_target_repo.repo_name,
778 repo_name=pr_target_repo.repo_name,
779 qualified=True)
779 qualified=True)
780
780
781 pr_source_repo_url = h.url(
781 pr_source_repo_url = h.url(
782 'summary_home',
782 'summary_home',
783 repo_name=pr_source_repo.repo_name,
783 repo_name=pr_source_repo.repo_name,
784 qualified=True)
784 qualified=True)
785
785
786 # pull request specifics
786 # pull request specifics
787 pull_request_commits = [
787 pull_request_commits = [
788 (x.raw_id, x.message)
788 (x.raw_id, x.message)
789 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
789 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
790
790
791 kwargs = {
791 kwargs = {
792 'user': pull_request.author,
792 'user': pull_request.author,
793 'pull_request': pull_request_obj,
793 'pull_request': pull_request_obj,
794 'pull_request_commits': pull_request_commits,
794 'pull_request_commits': pull_request_commits,
795
795
796 'pull_request_target_repo': pr_target_repo,
796 'pull_request_target_repo': pr_target_repo,
797 'pull_request_target_repo_url': pr_target_repo_url,
797 'pull_request_target_repo_url': pr_target_repo_url,
798
798
799 'pull_request_source_repo': pr_source_repo,
799 'pull_request_source_repo': pr_source_repo,
800 'pull_request_source_repo_url': pr_source_repo_url,
800 'pull_request_source_repo_url': pr_source_repo_url,
801
801
802 'pull_request_url': pr_url,
802 'pull_request_url': pr_url,
803 }
803 }
804
804
805 # pre-generate the subject for notification itself
805 # pre-generate the subject for notification itself
806 (subject,
806 (subject,
807 _h, _e, # we don't care about those
807 _h, _e, # we don't care about those
808 body_plaintext) = EmailNotificationModel().render_email(
808 body_plaintext) = EmailNotificationModel().render_email(
809 notification_type, **kwargs)
809 notification_type, **kwargs)
810
810
811 # create notification objects, and emails
811 # create notification objects, and emails
812 NotificationModel().create(
812 NotificationModel().create(
813 created_by=pull_request.author,
813 created_by=pull_request.author,
814 notification_subject=subject,
814 notification_subject=subject,
815 notification_body=body_plaintext,
815 notification_body=body_plaintext,
816 notification_type=notification_type,
816 notification_type=notification_type,
817 recipients=recipients,
817 recipients=recipients,
818 email_kwargs=kwargs,
818 email_kwargs=kwargs,
819 )
819 )
820
820
821 def delete(self, pull_request):
821 def delete(self, pull_request):
822 pull_request = self.__get_pull_request(pull_request)
822 pull_request = self.__get_pull_request(pull_request)
823 self._cleanup_merge_workspace(pull_request)
823 self._cleanup_merge_workspace(pull_request)
824 Session().delete(pull_request)
824 Session().delete(pull_request)
825
825
826 def close_pull_request(self, pull_request, user):
826 def close_pull_request(self, pull_request, user):
827 pull_request = self.__get_pull_request(pull_request)
827 pull_request = self.__get_pull_request(pull_request)
828 self._cleanup_merge_workspace(pull_request)
828 self._cleanup_merge_workspace(pull_request)
829 pull_request.status = PullRequest.STATUS_CLOSED
829 pull_request.status = PullRequest.STATUS_CLOSED
830 pull_request.updated_on = datetime.datetime.now()
830 pull_request.updated_on = datetime.datetime.now()
831 Session().add(pull_request)
831 Session().add(pull_request)
832 self._trigger_pull_request_hook(
832 self._trigger_pull_request_hook(
833 pull_request, pull_request.author, 'close')
833 pull_request, pull_request.author, 'close')
834 self._log_action('user_closed_pull_request', user, pull_request)
834 self._log_action('user_closed_pull_request', user, pull_request)
835
835
836 def close_pull_request_with_comment(self, pull_request, user, repo,
836 def close_pull_request_with_comment(self, pull_request, user, repo,
837 message=None):
837 message=None):
838 status = ChangesetStatus.STATUS_REJECTED
838 status = ChangesetStatus.STATUS_REJECTED
839
839
840 if not message:
840 if not message:
841 message = (
841 message = (
842 _('Status change %(transition_icon)s %(status)s') % {
842 _('Status change %(transition_icon)s %(status)s') % {
843 'transition_icon': '>',
843 'transition_icon': '>',
844 'status': ChangesetStatus.get_status_lbl(status)})
844 'status': ChangesetStatus.get_status_lbl(status)})
845
845
846 internal_message = _('Closing with') + ' ' + message
846 internal_message = _('Closing with') + ' ' + message
847
847
848 comm = ChangesetCommentsModel().create(
848 comm = ChangesetCommentsModel().create(
849 text=internal_message,
849 text=internal_message,
850 repo=repo.repo_id,
850 repo=repo.repo_id,
851 user=user.user_id,
851 user=user.user_id,
852 pull_request=pull_request.pull_request_id,
852 pull_request=pull_request.pull_request_id,
853 f_path=None,
853 f_path=None,
854 line_no=None,
854 line_no=None,
855 status_change=ChangesetStatus.get_status_lbl(status),
855 status_change=ChangesetStatus.get_status_lbl(status),
856 closing_pr=True
856 closing_pr=True
857 )
857 )
858
858
859 ChangesetStatusModel().set_status(
859 ChangesetStatusModel().set_status(
860 repo.repo_id,
860 repo.repo_id,
861 status,
861 status,
862 user.user_id,
862 user.user_id,
863 comm,
863 comm,
864 pull_request=pull_request.pull_request_id
864 pull_request=pull_request.pull_request_id
865 )
865 )
866 Session().flush()
866 Session().flush()
867
867
868 PullRequestModel().close_pull_request(
868 PullRequestModel().close_pull_request(
869 pull_request.pull_request_id, user)
869 pull_request.pull_request_id, user)
870
870
871 def merge_status(self, pull_request):
871 def merge_status(self, pull_request):
872 if not self._is_merge_enabled(pull_request):
872 if not self._is_merge_enabled(pull_request):
873 return False, _('Server-side pull request merging is disabled.')
873 return False, _('Server-side pull request merging is disabled.')
874 if pull_request.is_closed():
874 if pull_request.is_closed():
875 return False, _('This pull request is closed.')
875 return False, _('This pull request is closed.')
876 merge_possible, msg = self._check_repo_requirements(
876 merge_possible, msg = self._check_repo_requirements(
877 target=pull_request.target_repo, source=pull_request.source_repo)
877 target=pull_request.target_repo, source=pull_request.source_repo)
878 if not merge_possible:
878 if not merge_possible:
879 return merge_possible, msg
879 return merge_possible, msg
880
880
881 try:
881 try:
882 resp = self._try_merge(pull_request)
882 resp = self._try_merge(pull_request)
883 status = resp.possible, self.merge_status_message(
883 status = resp.possible, self.merge_status_message(
884 resp.failure_reason)
884 resp.failure_reason)
885 except NotImplementedError:
885 except NotImplementedError:
886 status = False, _('Pull request merging is not supported.')
886 status = False, _('Pull request merging is not supported.')
887
887
888 return status
888 return status
889
889
890 def _check_repo_requirements(self, target, source):
890 def _check_repo_requirements(self, target, source):
891 """
891 """
892 Check if `target` and `source` have compatible requirements.
892 Check if `target` and `source` have compatible requirements.
893
893
894 Currently this is just checking for largefiles.
894 Currently this is just checking for largefiles.
895 """
895 """
896 target_has_largefiles = self._has_largefiles(target)
896 target_has_largefiles = self._has_largefiles(target)
897 source_has_largefiles = self._has_largefiles(source)
897 source_has_largefiles = self._has_largefiles(source)
898 merge_possible = True
898 merge_possible = True
899 message = u''
899 message = u''
900
900
901 if target_has_largefiles != source_has_largefiles:
901 if target_has_largefiles != source_has_largefiles:
902 merge_possible = False
902 merge_possible = False
903 if source_has_largefiles:
903 if source_has_largefiles:
904 message = _(
904 message = _(
905 'Target repository large files support is disabled.')
905 'Target repository large files support is disabled.')
906 else:
906 else:
907 message = _(
907 message = _(
908 'Source repository large files support is disabled.')
908 'Source repository large files support is disabled.')
909
909
910 return merge_possible, message
910 return merge_possible, message
911
911
912 def _has_largefiles(self, repo):
912 def _has_largefiles(self, repo):
913 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
913 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
914 'extensions', 'largefiles')
914 'extensions', 'largefiles')
915 return largefiles_ui and largefiles_ui[0].active
915 return largefiles_ui and largefiles_ui[0].active
916
916
917 def _try_merge(self, pull_request):
917 def _try_merge(self, pull_request):
918 """
918 """
919 Try to merge the pull request and return the merge status.
919 Try to merge the pull request and return the merge status.
920 """
920 """
921 log.debug(
921 log.debug(
922 "Trying out if the pull request %s can be merged.",
922 "Trying out if the pull request %s can be merged.",
923 pull_request.pull_request_id)
923 pull_request.pull_request_id)
924 target_vcs = pull_request.target_repo.scm_instance()
924 target_vcs = pull_request.target_repo.scm_instance()
925 target_ref = self._refresh_reference(
925 target_ref = self._refresh_reference(
926 pull_request.target_ref_parts, target_vcs)
926 pull_request.target_ref_parts, target_vcs)
927
927
928 target_locked = pull_request.target_repo.locked
928 target_locked = pull_request.target_repo.locked
929 if target_locked and target_locked[0]:
929 if target_locked and target_locked[0]:
930 log.debug("The target repository is locked.")
930 log.debug("The target repository is locked.")
931 merge_state = MergeResponse(
931 merge_state = MergeResponse(
932 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
932 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
933 elif self._needs_merge_state_refresh(pull_request, target_ref):
933 elif self._needs_merge_state_refresh(pull_request, target_ref):
934 log.debug("Refreshing the merge status of the repository.")
934 log.debug("Refreshing the merge status of the repository.")
935 merge_state = self._refresh_merge_state(
935 merge_state = self._refresh_merge_state(
936 pull_request, target_vcs, target_ref)
936 pull_request, target_vcs, target_ref)
937 else:
937 else:
938 possible = pull_request.\
938 possible = pull_request.\
939 _last_merge_status == MergeFailureReason.NONE
939 _last_merge_status == MergeFailureReason.NONE
940 merge_state = MergeResponse(
940 merge_state = MergeResponse(
941 possible, False, None, pull_request._last_merge_status)
941 possible, False, None, pull_request._last_merge_status)
942 log.debug("Merge response: %s", merge_state)
942 log.debug("Merge response: %s", merge_state)
943 return merge_state
943 return merge_state
944
944
945 def _refresh_reference(self, reference, vcs_repository):
945 def _refresh_reference(self, reference, vcs_repository):
946 if reference.type in ('branch', 'book'):
946 if reference.type in ('branch', 'book'):
947 name_or_id = reference.name
947 name_or_id = reference.name
948 else:
948 else:
949 name_or_id = reference.commit_id
949 name_or_id = reference.commit_id
950 refreshed_commit = vcs_repository.get_commit(name_or_id)
950 refreshed_commit = vcs_repository.get_commit(name_or_id)
951 refreshed_reference = Reference(
951 refreshed_reference = Reference(
952 reference.type, reference.name, refreshed_commit.raw_id)
952 reference.type, reference.name, refreshed_commit.raw_id)
953 return refreshed_reference
953 return refreshed_reference
954
954
955 def _needs_merge_state_refresh(self, pull_request, target_reference):
955 def _needs_merge_state_refresh(self, pull_request, target_reference):
956 return not(
956 return not(
957 pull_request.revisions and
957 pull_request.revisions and
958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
959 target_reference.commit_id == pull_request._last_merge_target_rev)
959 target_reference.commit_id == pull_request._last_merge_target_rev)
960
960
961 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
961 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
962 workspace_id = self._workspace_id(pull_request)
962 workspace_id = self._workspace_id(pull_request)
963 source_vcs = pull_request.source_repo.scm_instance()
963 source_vcs = pull_request.source_repo.scm_instance()
964 use_rebase = self._use_rebase_for_merging(pull_request)
964 use_rebase = self._use_rebase_for_merging(pull_request)
965 merge_state = target_vcs.merge(
965 merge_state = target_vcs.merge(
966 target_reference, source_vcs, pull_request.source_ref_parts,
966 target_reference, source_vcs, pull_request.source_ref_parts,
967 workspace_id, dry_run=True, use_rebase=use_rebase)
967 workspace_id, dry_run=True, use_rebase=use_rebase)
968
968
969 # Do not store the response if there was an unknown error.
969 # Do not store the response if there was an unknown error.
970 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
970 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
971 pull_request._last_merge_source_rev = pull_request.\
971 pull_request._last_merge_source_rev = pull_request.\
972 source_ref_parts.commit_id
972 source_ref_parts.commit_id
973 pull_request._last_merge_target_rev = target_reference.commit_id
973 pull_request._last_merge_target_rev = target_reference.commit_id
974 pull_request._last_merge_status = (
974 pull_request._last_merge_status = (
975 merge_state.failure_reason)
975 merge_state.failure_reason)
976 Session().add(pull_request)
976 Session().add(pull_request)
977 Session().flush()
977 Session().flush()
978
978
979 return merge_state
979 return merge_state
980
980
981 def _workspace_id(self, pull_request):
981 def _workspace_id(self, pull_request):
982 workspace_id = 'pr-%s' % pull_request.pull_request_id
982 workspace_id = 'pr-%s' % pull_request.pull_request_id
983 return workspace_id
983 return workspace_id
984
984
985 def merge_status_message(self, status_code):
985 def merge_status_message(self, status_code):
986 """
986 """
987 Return a human friendly error message for the given merge status code.
987 Return a human friendly error message for the given merge status code.
988 """
988 """
989 return self.MERGE_STATUS_MESSAGES[status_code]
989 return self.MERGE_STATUS_MESSAGES[status_code]
990
990
991 def generate_repo_data(self, repo, commit_id=None, branch=None,
991 def generate_repo_data(self, repo, commit_id=None, branch=None,
992 bookmark=None):
992 bookmark=None):
993 all_refs, selected_ref = \
993 all_refs, selected_ref = \
994 self._get_repo_pullrequest_sources(
994 self._get_repo_pullrequest_sources(
995 repo.scm_instance(), commit_id=commit_id,
995 repo.scm_instance(), commit_id=commit_id,
996 branch=branch, bookmark=bookmark)
996 branch=branch, bookmark=bookmark)
997
997
998 refs_select2 = []
998 refs_select2 = []
999 for element in all_refs:
999 for element in all_refs:
1000 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1000 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1001 refs_select2.append({'text': element[1], 'children': children})
1001 refs_select2.append({'text': element[1], 'children': children})
1002
1002
1003 return {
1003 return {
1004 'user': {
1004 'user': {
1005 'user_id': repo.user.user_id,
1005 'user_id': repo.user.user_id,
1006 'username': repo.user.username,
1006 'username': repo.user.username,
1007 'firstname': repo.user.firstname,
1007 'firstname': repo.user.firstname,
1008 'lastname': repo.user.lastname,
1008 'lastname': repo.user.lastname,
1009 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1009 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1010 },
1010 },
1011 'description': h.chop_at_smart(repo.description, '\n'),
1011 'description': h.chop_at_smart(repo.description, '\n'),
1012 'refs': {
1012 'refs': {
1013 'all_refs': all_refs,
1013 'all_refs': all_refs,
1014 'selected_ref': selected_ref,
1014 'selected_ref': selected_ref,
1015 'select2_refs': refs_select2
1015 'select2_refs': refs_select2
1016 }
1016 }
1017 }
1017 }
1018
1018
1019 def generate_pullrequest_title(self, source, source_ref, target):
1019 def generate_pullrequest_title(self, source, source_ref, target):
1020 return '{source}#{at_ref} to {target}'.format(
1020 return '{source}#{at_ref} to {target}'.format(
1021 source=source,
1021 source=source,
1022 at_ref=source_ref,
1022 at_ref=source_ref,
1023 target=target,
1023 target=target,
1024 )
1024 )
1025
1025
1026 def _cleanup_merge_workspace(self, pull_request):
1026 def _cleanup_merge_workspace(self, pull_request):
1027 # Merging related cleanup
1027 # Merging related cleanup
1028 target_scm = pull_request.target_repo.scm_instance()
1028 target_scm = pull_request.target_repo.scm_instance()
1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1030
1030
1031 try:
1031 try:
1032 target_scm.cleanup_merge_workspace(workspace_id)
1032 target_scm.cleanup_merge_workspace(workspace_id)
1033 except NotImplementedError:
1033 except NotImplementedError:
1034 pass
1034 pass
1035
1035
1036 def _get_repo_pullrequest_sources(
1036 def _get_repo_pullrequest_sources(
1037 self, repo, commit_id=None, branch=None, bookmark=None):
1037 self, repo, commit_id=None, branch=None, bookmark=None):
1038 """
1038 """
1039 Return a structure with repo's interesting commits, suitable for
1039 Return a structure with repo's interesting commits, suitable for
1040 the selectors in pullrequest controller
1040 the selectors in pullrequest controller
1041
1041
1042 :param commit_id: a commit that must be in the list somehow
1042 :param commit_id: a commit that must be in the list somehow
1043 and selected by default
1043 and selected by default
1044 :param branch: a branch that must be in the list and selected
1044 :param branch: a branch that must be in the list and selected
1045 by default - even if closed
1045 by default - even if closed
1046 :param bookmark: a bookmark that must be in the list and selected
1046 :param bookmark: a bookmark that must be in the list and selected
1047 """
1047 """
1048
1048
1049 commit_id = safe_str(commit_id) if commit_id else None
1049 commit_id = safe_str(commit_id) if commit_id else None
1050 branch = safe_str(branch) if branch else None
1050 branch = safe_str(branch) if branch else None
1051 bookmark = safe_str(bookmark) if bookmark else None
1051 bookmark = safe_str(bookmark) if bookmark else None
1052
1052
1053 selected = None
1053 selected = None
1054
1054
1055 # order matters: first source that has commit_id in it will be selected
1055 # order matters: first source that has commit_id in it will be selected
1056 sources = []
1056 sources = []
1057 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1057 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1058 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1058 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1059
1059
1060 if commit_id:
1060 if commit_id:
1061 ref_commit = (h.short_id(commit_id), commit_id)
1061 ref_commit = (h.short_id(commit_id), commit_id)
1062 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1062 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1063
1063
1064 sources.append(
1064 sources.append(
1065 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1065 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1066 )
1066 )
1067
1067
1068 groups = []
1068 groups = []
1069 for group_key, ref_list, group_name, match in sources:
1069 for group_key, ref_list, group_name, match in sources:
1070 group_refs = []
1070 group_refs = []
1071 for ref_name, ref_id in ref_list:
1071 for ref_name, ref_id in ref_list:
1072 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1072 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1073 group_refs.append((ref_key, ref_name))
1073 group_refs.append((ref_key, ref_name))
1074
1074
1075 if not selected:
1075 if not selected:
1076 if set([commit_id, match]) & set([ref_id, ref_name]):
1076 if set([commit_id, match]) & set([ref_id, ref_name]):
1077 selected = ref_key
1077 selected = ref_key
1078
1078
1079 if group_refs:
1079 if group_refs:
1080 groups.append((group_refs, group_name))
1080 groups.append((group_refs, group_name))
1081
1081
1082 if not selected:
1082 if not selected:
1083 ref = commit_id or branch or bookmark
1083 ref = commit_id or branch or bookmark
1084 if ref:
1084 if ref:
1085 raise CommitDoesNotExistError(
1085 raise CommitDoesNotExistError(
1086 'No commit refs could be found matching: %s' % ref)
1086 'No commit refs could be found matching: %s' % ref)
1087 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1087 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1088 selected = 'branch:%s:%s' % (
1088 selected = 'branch:%s:%s' % (
1089 repo.DEFAULT_BRANCH_NAME,
1089 repo.DEFAULT_BRANCH_NAME,
1090 repo.branches[repo.DEFAULT_BRANCH_NAME]
1090 repo.branches[repo.DEFAULT_BRANCH_NAME]
1091 )
1091 )
1092 elif repo.commit_ids:
1092 elif repo.commit_ids:
1093 rev = repo.commit_ids[0]
1093 rev = repo.commit_ids[0]
1094 selected = 'rev:%s:%s' % (rev, rev)
1094 selected = 'rev:%s:%s' % (rev, rev)
1095 else:
1095 else:
1096 raise EmptyRepositoryError()
1096 raise EmptyRepositoryError()
1097 return groups, selected
1097 return groups, selected
1098
1098
1099 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1099 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1100 pull_request = self.__get_pull_request(pull_request)
1100 pull_request = self.__get_pull_request(pull_request)
1101 return self._get_diff_from_pr_or_version(pull_request, context=context)
1101 return self._get_diff_from_pr_or_version(pull_request, context=context)
1102
1102
1103 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1103 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1104 source_repo = pr_or_version.source_repo
1104 source_repo = pr_or_version.source_repo
1105
1105
1106 # we swap org/other ref since we run a simple diff on one repo
1106 # we swap org/other ref since we run a simple diff on one repo
1107 target_ref_id = pr_or_version.target_ref_parts.commit_id
1107 target_ref_id = pr_or_version.target_ref_parts.commit_id
1108 source_ref_id = pr_or_version.source_ref_parts.commit_id
1108 source_ref_id = pr_or_version.source_ref_parts.commit_id
1109 target_commit = source_repo.get_commit(
1109 target_commit = source_repo.get_commit(
1110 commit_id=safe_str(target_ref_id))
1110 commit_id=safe_str(target_ref_id))
1111 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1111 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1112 vcs_repo = source_repo.scm_instance()
1112 vcs_repo = source_repo.scm_instance()
1113
1113
1114 # TODO: johbo: In the context of an update, we cannot reach
1114 # TODO: johbo: In the context of an update, we cannot reach
1115 # the old commit anymore with our normal mechanisms. It needs
1115 # the old commit anymore with our normal mechanisms. It needs
1116 # some sort of special support in the vcs layer to avoid this
1116 # some sort of special support in the vcs layer to avoid this
1117 # workaround.
1117 # workaround.
1118 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1118 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1119 vcs_repo.alias == 'git'):
1119 vcs_repo.alias == 'git'):
1120 source_commit.raw_id = safe_str(source_ref_id)
1120 source_commit.raw_id = safe_str(source_ref_id)
1121
1121
1122 log.debug('calculating diff between '
1122 log.debug('calculating diff between '
1123 'source_ref:%s and target_ref:%s for repo `%s`',
1123 'source_ref:%s and target_ref:%s for repo `%s`',
1124 target_ref_id, source_ref_id,
1124 target_ref_id, source_ref_id,
1125 safe_unicode(vcs_repo.path))
1125 safe_unicode(vcs_repo.path))
1126
1126
1127 vcs_diff = vcs_repo.get_diff(
1127 vcs_diff = vcs_repo.get_diff(
1128 commit1=target_commit, commit2=source_commit, context=context)
1128 commit1=target_commit, commit2=source_commit, context=context)
1129 return vcs_diff
1129 return vcs_diff
1130
1130
1131 def _is_merge_enabled(self, pull_request):
1131 def _is_merge_enabled(self, pull_request):
1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 settings = settings_model.get_general_settings()
1133 settings = settings_model.get_general_settings()
1134 return settings.get('rhodecode_pr_merge_enabled', False)
1134 return settings.get('rhodecode_pr_merge_enabled', False)
1135
1135
1136 def _use_rebase_for_merging(self, pull_request):
1136 def _use_rebase_for_merging(self, pull_request):
1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1138 settings = settings_model.get_general_settings()
1138 settings = settings_model.get_general_settings()
1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1140
1140
1141 def _log_action(self, action, user, pull_request):
1141 def _log_action(self, action, user, pull_request):
1142 action_logger(
1142 action_logger(
1143 user,
1143 user,
1144 '{action}:{pr_id}'.format(
1144 '{action}:{pr_id}'.format(
1145 action=action, pr_id=pull_request.pull_request_id),
1145 action=action, pr_id=pull_request.pull_request_id),
1146 pull_request.target_repo)
1146 pull_request.target_repo)
1147
1147
1148
1148
# commit ids added / kept / removed between two pull request states
ChangeTuple = namedtuple('ChangeTuple', 'added common removed')

# file paths added / modified / removed between two pull request states
FileChangeTuple = namedtuple('FileChangeTuple', 'added modified removed')
@@ -1,934 +1,934 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Repository model for rhodecode
22 Repository model for rhodecode
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 from datetime import datetime
31 from datetime import datetime
32
32
33 from sqlalchemy.sql import func
33 from sqlalchemy.sql import func
34 from sqlalchemy.sql.expression import true, or_
34 from sqlalchemy.sql.expression import true, or_
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode import events
38 from rhodecode.lib import helpers as h
38 from rhodecode.lib import helpers as h
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.exceptions import AttachedForksError
41 from rhodecode.lib.exceptions import AttachedForksError
42 from rhodecode.lib.hooks_base import log_delete_repository
42 from rhodecode.lib.hooks_base import log_delete_repository
43 from rhodecode.lib.utils import make_db_config
43 from rhodecode.lib.utils import make_db_config
44 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 from rhodecode.lib.vcs.backends import get_backend
47 from rhodecode.lib.vcs.backends import get_backend
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 RepoGroup, RepositoryField)
52 RepoGroup, RepositoryField)
53 from rhodecode.model.scm import UserGroupList
53 from rhodecode.model.scm import UserGroupList
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55
55
56
56
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
58
58
59
59
60 class RepoModel(BaseModel):
60 class RepoModel(BaseModel):
61
61
62 cls = Repository
62 cls = Repository
63
63
64 def _get_user_group(self, users_group):
64 def _get_user_group(self, users_group):
65 return self._get_instance(UserGroup, users_group,
65 return self._get_instance(UserGroup, users_group,
66 callback=UserGroup.get_by_group_name)
66 callback=UserGroup.get_by_group_name)
67
67
68 def _get_repo_group(self, repo_group):
68 def _get_repo_group(self, repo_group):
69 return self._get_instance(RepoGroup, repo_group,
69 return self._get_instance(RepoGroup, repo_group,
70 callback=RepoGroup.get_by_group_name)
70 callback=RepoGroup.get_by_group_name)
71
71
72 def _create_default_perms(self, repository, private):
72 def _create_default_perms(self, repository, private):
73 # create default permission
73 # create default permission
74 default = 'repository.read'
74 default = 'repository.read'
75 def_user = User.get_default_user()
75 def_user = User.get_default_user()
76 for p in def_user.user_perms:
76 for p in def_user.user_perms:
77 if p.permission.permission_name.startswith('repository.'):
77 if p.permission.permission_name.startswith('repository.'):
78 default = p.permission.permission_name
78 default = p.permission.permission_name
79 break
79 break
80
80
81 default_perm = 'repository.none' if private else default
81 default_perm = 'repository.none' if private else default
82
82
83 repo_to_perm = UserRepoToPerm()
83 repo_to_perm = UserRepoToPerm()
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85
85
86 repo_to_perm.repository = repository
86 repo_to_perm.repository = repository
87 repo_to_perm.user_id = def_user.user_id
87 repo_to_perm.user_id = def_user.user_id
88
88
89 return repo_to_perm
89 return repo_to_perm
90
90
91 @LazyProperty
91 @LazyProperty
92 def repos_path(self):
92 def repos_path(self):
93 """
93 """
94 Gets the repositories root path from database
94 Gets the repositories root path from database
95 """
95 """
96 settings_model = VcsSettingsModel(sa=self.sa)
96 settings_model = VcsSettingsModel(sa=self.sa)
97 return settings_model.get_repos_location()
97 return settings_model.get_repos_location()
98
98
99 def get(self, repo_id, cache=False):
99 def get(self, repo_id, cache=False):
100 repo = self.sa.query(Repository) \
100 repo = self.sa.query(Repository) \
101 .filter(Repository.repo_id == repo_id)
101 .filter(Repository.repo_id == repo_id)
102
102
103 if cache:
103 if cache:
104 repo = repo.options(FromCache("sql_cache_short",
104 repo = repo.options(FromCache("sql_cache_short",
105 "get_repo_%s" % repo_id))
105 "get_repo_%s" % repo_id))
106 return repo.scalar()
106 return repo.scalar()
107
107
108 def get_repo(self, repository):
108 def get_repo(self, repository):
109 return self._get_repo(repository)
109 return self._get_repo(repository)
110
110
111 def get_by_repo_name(self, repo_name, cache=False):
111 def get_by_repo_name(self, repo_name, cache=False):
112 repo = self.sa.query(Repository) \
112 repo = self.sa.query(Repository) \
113 .filter(Repository.repo_name == repo_name)
113 .filter(Repository.repo_name == repo_name)
114
114
115 if cache:
115 if cache:
116 repo = repo.options(FromCache("sql_cache_short",
116 repo = repo.options(FromCache("sql_cache_short",
117 "get_repo_%s" % repo_name))
117 "get_repo_%s" % repo_name))
118 return repo.scalar()
118 return repo.scalar()
119
119
120 def _extract_id_from_repo_name(self, repo_name):
120 def _extract_id_from_repo_name(self, repo_name):
121 if repo_name.startswith('/'):
121 if repo_name.startswith('/'):
122 repo_name = repo_name.lstrip('/')
122 repo_name = repo_name.lstrip('/')
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 if by_id_match:
124 if by_id_match:
125 return by_id_match.groups()[0]
125 return by_id_match.groups()[0]
126
126
127 def get_repo_by_id(self, repo_name):
127 def get_repo_by_id(self, repo_name):
128 """
128 """
129 Extracts repo_name by id from special urls.
129 Extracts repo_name by id from special urls.
130 Example url is _11/repo_name
130 Example url is _11/repo_name
131
131
132 :param repo_name:
132 :param repo_name:
133 :return: repo object if matched else None
133 :return: repo object if matched else None
134 """
134 """
135 try:
135 try:
136 _repo_id = self._extract_id_from_repo_name(repo_name)
136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 if _repo_id:
137 if _repo_id:
138 return self.get(_repo_id)
138 return self.get(_repo_id)
139 except Exception:
139 except Exception:
140 log.exception('Failed to extract repo_name from URL')
140 log.exception('Failed to extract repo_name from URL')
141
141
142 return None
142 return None
143
143
144 def get_url(self, repo):
144 def get_url(self, repo):
145 return url('summary_home', repo_name=repo.repo_name, qualified=True)
145 return h.url('summary_home', repo_name=repo.repo_name, qualified=True)
146
146
147 def get_users(self, name_contains=None, limit=20, only_active=True):
147 def get_users(self, name_contains=None, limit=20, only_active=True):
148 # TODO: mikhail: move this method to the UserModel.
148 # TODO: mikhail: move this method to the UserModel.
149 query = self.sa.query(User)
149 query = self.sa.query(User)
150 if only_active:
150 if only_active:
151 query = query.filter(User.active == true())
151 query = query.filter(User.active == true())
152
152
153 if name_contains:
153 if name_contains:
154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
155 query = query.filter(
155 query = query.filter(
156 or_(
156 or_(
157 User.name.ilike(ilike_expression),
157 User.name.ilike(ilike_expression),
158 User.lastname.ilike(ilike_expression),
158 User.lastname.ilike(ilike_expression),
159 User.username.ilike(ilike_expression)
159 User.username.ilike(ilike_expression)
160 )
160 )
161 )
161 )
162 query = query.limit(limit)
162 query = query.limit(limit)
163 users = query.all()
163 users = query.all()
164
164
165 _users = [
165 _users = [
166 {
166 {
167 'id': user.user_id,
167 'id': user.user_id,
168 'first_name': user.name,
168 'first_name': user.name,
169 'last_name': user.lastname,
169 'last_name': user.lastname,
170 'username': user.username,
170 'username': user.username,
171 'icon_link': h.gravatar_url(user.email, 14),
171 'icon_link': h.gravatar_url(user.email, 14),
172 'value_display': h.person(user.email),
172 'value_display': h.person(user.email),
173 'value': user.username,
173 'value': user.username,
174 'value_type': 'user',
174 'value_type': 'user',
175 'active': user.active,
175 'active': user.active,
176 }
176 }
177 for user in users
177 for user in users
178 ]
178 ]
179 return _users
179 return _users
180
180
181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
182 # TODO: mikhail: move this method to the UserGroupModel.
182 # TODO: mikhail: move this method to the UserGroupModel.
183 query = self.sa.query(UserGroup)
183 query = self.sa.query(UserGroup)
184 if only_active:
184 if only_active:
185 query = query.filter(UserGroup.users_group_active == true())
185 query = query.filter(UserGroup.users_group_active == true())
186
186
187 if name_contains:
187 if name_contains:
188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
189 query = query.filter(
189 query = query.filter(
190 UserGroup.users_group_name.ilike(ilike_expression))\
190 UserGroup.users_group_name.ilike(ilike_expression))\
191 .order_by(func.length(UserGroup.users_group_name))\
191 .order_by(func.length(UserGroup.users_group_name))\
192 .order_by(UserGroup.users_group_name)
192 .order_by(UserGroup.users_group_name)
193
193
194 query = query.limit(limit)
194 query = query.limit(limit)
195 user_groups = query.all()
195 user_groups = query.all()
196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
198
198
199 _groups = [
199 _groups = [
200 {
200 {
201 'id': group.users_group_id,
201 'id': group.users_group_id,
202 # TODO: marcink figure out a way to generate the url for the
202 # TODO: marcink figure out a way to generate the url for the
203 # icon
203 # icon
204 'icon_link': '',
204 'icon_link': '',
205 'value_display': 'Group: %s (%d members)' % (
205 'value_display': 'Group: %s (%d members)' % (
206 group.users_group_name, len(group.members),),
206 group.users_group_name, len(group.members),),
207 'value': group.users_group_name,
207 'value': group.users_group_name,
208 'value_type': 'user_group',
208 'value_type': 'user_group',
209 'active': group.users_group_active,
209 'active': group.users_group_active,
210 }
210 }
211 for group in user_groups
211 for group in user_groups
212 ]
212 ]
213 return _groups
213 return _groups
214
214
215 @classmethod
215 @classmethod
216 def update_repoinfo(cls, repositories=None):
216 def update_repoinfo(cls, repositories=None):
217 if not repositories:
217 if not repositories:
218 repositories = Repository.getAll()
218 repositories = Repository.getAll()
219 for repo in repositories:
219 for repo in repositories:
220 repo.update_commit_cache()
220 repo.update_commit_cache()
221
221
222 def get_repos_as_dict(self, repo_list=None, admin=False,
222 def get_repos_as_dict(self, repo_list=None, admin=False,
223 super_user_actions=False):
223 super_user_actions=False):
224
224
225 from rhodecode.lib.utils import PartialRenderer
225 from rhodecode.lib.utils import PartialRenderer
226 _render = PartialRenderer('data_table/_dt_elements.html')
226 _render = PartialRenderer('data_table/_dt_elements.html')
227 c = _render.c
227 c = _render.c
228
228
229 def quick_menu(repo_name):
229 def quick_menu(repo_name):
230 return _render('quick_menu', repo_name)
230 return _render('quick_menu', repo_name)
231
231
232 def repo_lnk(name, rtype, rstate, private, fork_of):
232 def repo_lnk(name, rtype, rstate, private, fork_of):
233 return _render('repo_name', name, rtype, rstate, private, fork_of,
233 return _render('repo_name', name, rtype, rstate, private, fork_of,
234 short_name=not admin, admin=False)
234 short_name=not admin, admin=False)
235
235
236 def last_change(last_change):
236 def last_change(last_change):
237 return _render("last_change", last_change)
237 return _render("last_change", last_change)
238
238
239 def rss_lnk(repo_name):
239 def rss_lnk(repo_name):
240 return _render("rss", repo_name)
240 return _render("rss", repo_name)
241
241
242 def atom_lnk(repo_name):
242 def atom_lnk(repo_name):
243 return _render("atom", repo_name)
243 return _render("atom", repo_name)
244
244
245 def last_rev(repo_name, cs_cache):
245 def last_rev(repo_name, cs_cache):
246 return _render('revision', repo_name, cs_cache.get('revision'),
246 return _render('revision', repo_name, cs_cache.get('revision'),
247 cs_cache.get('raw_id'), cs_cache.get('author'),
247 cs_cache.get('raw_id'), cs_cache.get('author'),
248 cs_cache.get('message'))
248 cs_cache.get('message'))
249
249
250 def desc(desc):
250 def desc(desc):
251 if c.visual.stylify_metatags:
251 if c.visual.stylify_metatags:
252 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
252 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
253 else:
253 else:
254 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
254 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
255
255
256 def state(repo_state):
256 def state(repo_state):
257 return _render("repo_state", repo_state)
257 return _render("repo_state", repo_state)
258
258
259 def repo_actions(repo_name):
259 def repo_actions(repo_name):
260 return _render('repo_actions', repo_name, super_user_actions)
260 return _render('repo_actions', repo_name, super_user_actions)
261
261
262 def user_profile(username):
262 def user_profile(username):
263 return _render('user_profile', username)
263 return _render('user_profile', username)
264
264
265 repos_data = []
265 repos_data = []
266 for repo in repo_list:
266 for repo in repo_list:
267 cs_cache = repo.changeset_cache
267 cs_cache = repo.changeset_cache
268 row = {
268 row = {
269 "menu": quick_menu(repo.repo_name),
269 "menu": quick_menu(repo.repo_name),
270
270
271 "name": repo_lnk(repo.repo_name, repo.repo_type,
271 "name": repo_lnk(repo.repo_name, repo.repo_type,
272 repo.repo_state, repo.private, repo.fork),
272 repo.repo_state, repo.private, repo.fork),
273 "name_raw": repo.repo_name.lower(),
273 "name_raw": repo.repo_name.lower(),
274
274
275 "last_change": last_change(repo.last_db_change),
275 "last_change": last_change(repo.last_db_change),
276 "last_change_raw": datetime_to_time(repo.last_db_change),
276 "last_change_raw": datetime_to_time(repo.last_db_change),
277
277
278 "last_changeset": last_rev(repo.repo_name, cs_cache),
278 "last_changeset": last_rev(repo.repo_name, cs_cache),
279 "last_changeset_raw": cs_cache.get('revision'),
279 "last_changeset_raw": cs_cache.get('revision'),
280
280
281 "desc": desc(repo.description),
281 "desc": desc(repo.description),
282 "owner": user_profile(repo.user.username),
282 "owner": user_profile(repo.user.username),
283
283
284 "state": state(repo.repo_state),
284 "state": state(repo.repo_state),
285 "rss": rss_lnk(repo.repo_name),
285 "rss": rss_lnk(repo.repo_name),
286
286
287 "atom": atom_lnk(repo.repo_name),
287 "atom": atom_lnk(repo.repo_name),
288 }
288 }
289 if admin:
289 if admin:
290 row.update({
290 row.update({
291 "action": repo_actions(repo.repo_name),
291 "action": repo_actions(repo.repo_name),
292 })
292 })
293 repos_data.append(row)
293 repos_data.append(row)
294
294
295 return repos_data
295 return repos_data
296
296
297 def _get_defaults(self, repo_name):
297 def _get_defaults(self, repo_name):
298 """
298 """
299 Gets information about repository, and returns a dict for
299 Gets information about repository, and returns a dict for
300 usage in forms
300 usage in forms
301
301
302 :param repo_name:
302 :param repo_name:
303 """
303 """
304
304
305 repo_info = Repository.get_by_repo_name(repo_name)
305 repo_info = Repository.get_by_repo_name(repo_name)
306
306
307 if repo_info is None:
307 if repo_info is None:
308 return None
308 return None
309
309
310 defaults = repo_info.get_dict()
310 defaults = repo_info.get_dict()
311 defaults['repo_name'] = repo_info.just_name
311 defaults['repo_name'] = repo_info.just_name
312
312
313 groups = repo_info.groups_with_parents
313 groups = repo_info.groups_with_parents
314 parent_group = groups[-1] if groups else None
314 parent_group = groups[-1] if groups else None
315
315
316 # we use -1 as this is how in HTML, we mark an empty group
316 # we use -1 as this is how in HTML, we mark an empty group
317 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
317 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
318
318
319 keys_to_process = (
319 keys_to_process = (
320 {'k': 'repo_type', 'strip': False},
320 {'k': 'repo_type', 'strip': False},
321 {'k': 'repo_enable_downloads', 'strip': True},
321 {'k': 'repo_enable_downloads', 'strip': True},
322 {'k': 'repo_description', 'strip': True},
322 {'k': 'repo_description', 'strip': True},
323 {'k': 'repo_enable_locking', 'strip': True},
323 {'k': 'repo_enable_locking', 'strip': True},
324 {'k': 'repo_landing_rev', 'strip': True},
324 {'k': 'repo_landing_rev', 'strip': True},
325 {'k': 'clone_uri', 'strip': False},
325 {'k': 'clone_uri', 'strip': False},
326 {'k': 'repo_private', 'strip': True},
326 {'k': 'repo_private', 'strip': True},
327 {'k': 'repo_enable_statistics', 'strip': True}
327 {'k': 'repo_enable_statistics', 'strip': True}
328 )
328 )
329
329
330 for item in keys_to_process:
330 for item in keys_to_process:
331 attr = item['k']
331 attr = item['k']
332 if item['strip']:
332 if item['strip']:
333 attr = remove_prefix(item['k'], 'repo_')
333 attr = remove_prefix(item['k'], 'repo_')
334
334
335 val = defaults[attr]
335 val = defaults[attr]
336 if item['k'] == 'repo_landing_rev':
336 if item['k'] == 'repo_landing_rev':
337 val = ':'.join(defaults[attr])
337 val = ':'.join(defaults[attr])
338 defaults[item['k']] = val
338 defaults[item['k']] = val
339 if item['k'] == 'clone_uri':
339 if item['k'] == 'clone_uri':
340 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
340 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
341
341
342 # fill owner
342 # fill owner
343 if repo_info.user:
343 if repo_info.user:
344 defaults.update({'user': repo_info.user.username})
344 defaults.update({'user': repo_info.user.username})
345 else:
345 else:
346 replacement_user = User.get_first_super_admin().username
346 replacement_user = User.get_first_super_admin().username
347 defaults.update({'user': replacement_user})
347 defaults.update({'user': replacement_user})
348
348
349 # fill repository users
349 # fill repository users
350 for p in repo_info.repo_to_perm:
350 for p in repo_info.repo_to_perm:
351 defaults.update({'u_perm_%s' % p.user.user_id:
351 defaults.update({'u_perm_%s' % p.user.user_id:
352 p.permission.permission_name})
352 p.permission.permission_name})
353
353
354 # fill repository groups
354 # fill repository groups
355 for p in repo_info.users_group_to_perm:
355 for p in repo_info.users_group_to_perm:
356 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
356 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
357 p.permission.permission_name})
357 p.permission.permission_name})
358
358
359 return defaults
359 return defaults
360
360
361 def update(self, repo, **kwargs):
361 def update(self, repo, **kwargs):
362 try:
362 try:
363 cur_repo = self._get_repo(repo)
363 cur_repo = self._get_repo(repo)
364 source_repo_name = cur_repo.repo_name
364 source_repo_name = cur_repo.repo_name
365 if 'user' in kwargs:
365 if 'user' in kwargs:
366 cur_repo.user = User.get_by_username(kwargs['user'])
366 cur_repo.user = User.get_by_username(kwargs['user'])
367
367
368 if 'repo_group' in kwargs:
368 if 'repo_group' in kwargs:
369 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
369 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
370 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
370 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
371
371
372 update_keys = [
372 update_keys = [
373 (1, 'repo_enable_downloads'),
373 (1, 'repo_enable_downloads'),
374 (1, 'repo_description'),
374 (1, 'repo_description'),
375 (1, 'repo_enable_locking'),
375 (1, 'repo_enable_locking'),
376 (1, 'repo_landing_rev'),
376 (1, 'repo_landing_rev'),
377 (1, 'repo_private'),
377 (1, 'repo_private'),
378 (1, 'repo_enable_statistics'),
378 (1, 'repo_enable_statistics'),
379 (0, 'clone_uri'),
379 (0, 'clone_uri'),
380 (0, 'fork_id')
380 (0, 'fork_id')
381 ]
381 ]
382 for strip, k in update_keys:
382 for strip, k in update_keys:
383 if k in kwargs:
383 if k in kwargs:
384 val = kwargs[k]
384 val = kwargs[k]
385 if strip:
385 if strip:
386 k = remove_prefix(k, 'repo_')
386 k = remove_prefix(k, 'repo_')
387 if k == 'clone_uri':
387 if k == 'clone_uri':
388 from rhodecode.model.validators import Missing
388 from rhodecode.model.validators import Missing
389 _change = kwargs.get('clone_uri_change')
389 _change = kwargs.get('clone_uri_change')
390 if _change in [Missing, 'OLD']:
390 if _change in [Missing, 'OLD']:
391 # we don't change the value, so use original one
391 # we don't change the value, so use original one
392 val = cur_repo.clone_uri
392 val = cur_repo.clone_uri
393
393
394 setattr(cur_repo, k, val)
394 setattr(cur_repo, k, val)
395
395
396 new_name = cur_repo.get_new_name(kwargs['repo_name'])
396 new_name = cur_repo.get_new_name(kwargs['repo_name'])
397 cur_repo.repo_name = new_name
397 cur_repo.repo_name = new_name
398
398
399 # if private flag is set, reset default permission to NONE
399 # if private flag is set, reset default permission to NONE
400 if kwargs.get('repo_private'):
400 if kwargs.get('repo_private'):
401 EMPTY_PERM = 'repository.none'
401 EMPTY_PERM = 'repository.none'
402 RepoModel().grant_user_permission(
402 RepoModel().grant_user_permission(
403 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
403 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
404 )
404 )
405
405
406 # handle extra fields
406 # handle extra fields
407 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
407 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
408 kwargs):
408 kwargs):
409 k = RepositoryField.un_prefix_key(field)
409 k = RepositoryField.un_prefix_key(field)
410 ex_field = RepositoryField.get_by_key_name(
410 ex_field = RepositoryField.get_by_key_name(
411 key=k, repo=cur_repo)
411 key=k, repo=cur_repo)
412 if ex_field:
412 if ex_field:
413 ex_field.field_value = kwargs[field]
413 ex_field.field_value = kwargs[field]
414 self.sa.add(ex_field)
414 self.sa.add(ex_field)
415 self.sa.add(cur_repo)
415 self.sa.add(cur_repo)
416
416
417 if source_repo_name != new_name:
417 if source_repo_name != new_name:
418 # rename repository
418 # rename repository
419 self._rename_filesystem_repo(
419 self._rename_filesystem_repo(
420 old=source_repo_name, new=new_name)
420 old=source_repo_name, new=new_name)
421
421
422 return cur_repo
422 return cur_repo
423 except Exception:
423 except Exception:
424 log.error(traceback.format_exc())
424 log.error(traceback.format_exc())
425 raise
425 raise
426
426
427 def _create_repo(self, repo_name, repo_type, description, owner,
427 def _create_repo(self, repo_name, repo_type, description, owner,
428 private=False, clone_uri=None, repo_group=None,
428 private=False, clone_uri=None, repo_group=None,
429 landing_rev='rev:tip', fork_of=None,
429 landing_rev='rev:tip', fork_of=None,
430 copy_fork_permissions=False, enable_statistics=False,
430 copy_fork_permissions=False, enable_statistics=False,
431 enable_locking=False, enable_downloads=False,
431 enable_locking=False, enable_downloads=False,
432 copy_group_permissions=False,
432 copy_group_permissions=False,
433 state=Repository.STATE_PENDING):
433 state=Repository.STATE_PENDING):
434 """
434 """
435 Create repository inside database with PENDING state, this should be
435 Create repository inside database with PENDING state, this should be
436 only executed by create() repo. With exception of importing existing
436 only executed by create() repo. With exception of importing existing
437 repos
437 repos
438 """
438 """
439 from rhodecode.model.scm import ScmModel
439 from rhodecode.model.scm import ScmModel
440
440
441 owner = self._get_user(owner)
441 owner = self._get_user(owner)
442 fork_of = self._get_repo(fork_of)
442 fork_of = self._get_repo(fork_of)
443 repo_group = self._get_repo_group(safe_int(repo_group))
443 repo_group = self._get_repo_group(safe_int(repo_group))
444
444
445 try:
445 try:
446 repo_name = safe_unicode(repo_name)
446 repo_name = safe_unicode(repo_name)
447 description = safe_unicode(description)
447 description = safe_unicode(description)
448 # repo name is just a name of repository
448 # repo name is just a name of repository
449 # while repo_name_full is a full qualified name that is combined
449 # while repo_name_full is a full qualified name that is combined
450 # with name and path of group
450 # with name and path of group
451 repo_name_full = repo_name
451 repo_name_full = repo_name
452 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
452 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
453
453
454 new_repo = Repository()
454 new_repo = Repository()
455 new_repo.repo_state = state
455 new_repo.repo_state = state
456 new_repo.enable_statistics = False
456 new_repo.enable_statistics = False
457 new_repo.repo_name = repo_name_full
457 new_repo.repo_name = repo_name_full
458 new_repo.repo_type = repo_type
458 new_repo.repo_type = repo_type
459 new_repo.user = owner
459 new_repo.user = owner
460 new_repo.group = repo_group
460 new_repo.group = repo_group
461 new_repo.description = description or repo_name
461 new_repo.description = description or repo_name
462 new_repo.private = private
462 new_repo.private = private
463 new_repo.clone_uri = clone_uri
463 new_repo.clone_uri = clone_uri
464 new_repo.landing_rev = landing_rev
464 new_repo.landing_rev = landing_rev
465
465
466 new_repo.enable_statistics = enable_statistics
466 new_repo.enable_statistics = enable_statistics
467 new_repo.enable_locking = enable_locking
467 new_repo.enable_locking = enable_locking
468 new_repo.enable_downloads = enable_downloads
468 new_repo.enable_downloads = enable_downloads
469
469
470 if repo_group:
470 if repo_group:
471 new_repo.enable_locking = repo_group.enable_locking
471 new_repo.enable_locking = repo_group.enable_locking
472
472
473 if fork_of:
473 if fork_of:
474 parent_repo = fork_of
474 parent_repo = fork_of
475 new_repo.fork = parent_repo
475 new_repo.fork = parent_repo
476
476
477 events.trigger(events.RepoPreCreateEvent(new_repo))
477 events.trigger(events.RepoPreCreateEvent(new_repo))
478
478
479 self.sa.add(new_repo)
479 self.sa.add(new_repo)
480
480
481 EMPTY_PERM = 'repository.none'
481 EMPTY_PERM = 'repository.none'
482 if fork_of and copy_fork_permissions:
482 if fork_of and copy_fork_permissions:
483 repo = fork_of
483 repo = fork_of
484 user_perms = UserRepoToPerm.query() \
484 user_perms = UserRepoToPerm.query() \
485 .filter(UserRepoToPerm.repository == repo).all()
485 .filter(UserRepoToPerm.repository == repo).all()
486 group_perms = UserGroupRepoToPerm.query() \
486 group_perms = UserGroupRepoToPerm.query() \
487 .filter(UserGroupRepoToPerm.repository == repo).all()
487 .filter(UserGroupRepoToPerm.repository == repo).all()
488
488
489 for perm in user_perms:
489 for perm in user_perms:
490 UserRepoToPerm.create(
490 UserRepoToPerm.create(
491 perm.user, new_repo, perm.permission)
491 perm.user, new_repo, perm.permission)
492
492
493 for perm in group_perms:
493 for perm in group_perms:
494 UserGroupRepoToPerm.create(
494 UserGroupRepoToPerm.create(
495 perm.users_group, new_repo, perm.permission)
495 perm.users_group, new_repo, perm.permission)
496 # in case we copy permissions and also set this repo to private
496 # in case we copy permissions and also set this repo to private
497 # override the default user permission to make it a private
497 # override the default user permission to make it a private
498 # repo
498 # repo
499 if private:
499 if private:
500 RepoModel(self.sa).grant_user_permission(
500 RepoModel(self.sa).grant_user_permission(
501 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
501 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
502
502
503 elif repo_group and copy_group_permissions:
503 elif repo_group and copy_group_permissions:
504 user_perms = UserRepoGroupToPerm.query() \
504 user_perms = UserRepoGroupToPerm.query() \
505 .filter(UserRepoGroupToPerm.group == repo_group).all()
505 .filter(UserRepoGroupToPerm.group == repo_group).all()
506
506
507 group_perms = UserGroupRepoGroupToPerm.query() \
507 group_perms = UserGroupRepoGroupToPerm.query() \
508 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
508 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
509
509
510 for perm in user_perms:
510 for perm in user_perms:
511 perm_name = perm.permission.permission_name.replace(
511 perm_name = perm.permission.permission_name.replace(
512 'group.', 'repository.')
512 'group.', 'repository.')
513 perm_obj = Permission.get_by_key(perm_name)
513 perm_obj = Permission.get_by_key(perm_name)
514 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
514 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
515
515
516 for perm in group_perms:
516 for perm in group_perms:
517 perm_name = perm.permission.permission_name.replace(
517 perm_name = perm.permission.permission_name.replace(
518 'group.', 'repository.')
518 'group.', 'repository.')
519 perm_obj = Permission.get_by_key(perm_name)
519 perm_obj = Permission.get_by_key(perm_name)
520 UserGroupRepoToPerm.create(
520 UserGroupRepoToPerm.create(
521 perm.users_group, new_repo, perm_obj)
521 perm.users_group, new_repo, perm_obj)
522
522
523 if private:
523 if private:
524 RepoModel(self.sa).grant_user_permission(
524 RepoModel(self.sa).grant_user_permission(
525 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
525 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
526
526
527 else:
527 else:
528 perm_obj = self._create_default_perms(new_repo, private)
528 perm_obj = self._create_default_perms(new_repo, private)
529 self.sa.add(perm_obj)
529 self.sa.add(perm_obj)
530
530
531 # now automatically start following this repository as owner
531 # now automatically start following this repository as owner
532 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
532 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
533 owner.user_id)
533 owner.user_id)
534
534
535 # we need to flush here, in order to check if database won't
535 # we need to flush here, in order to check if database won't
536 # throw any exceptions, create filesystem dirs at the very end
536 # throw any exceptions, create filesystem dirs at the very end
537 self.sa.flush()
537 self.sa.flush()
538 events.trigger(events.RepoCreatedEvent(new_repo))
538 events.trigger(events.RepoCreatedEvent(new_repo))
539 return new_repo
539 return new_repo
540
540
541 except Exception:
541 except Exception:
542 log.error(traceback.format_exc())
542 log.error(traceback.format_exc())
543 raise
543 raise
544
544
545 def create(self, form_data, cur_user):
545 def create(self, form_data, cur_user):
546 """
546 """
547 Create repository using celery tasks
547 Create repository using celery tasks
548
548
549 :param form_data:
549 :param form_data:
550 :param cur_user:
550 :param cur_user:
551 """
551 """
552 from rhodecode.lib.celerylib import tasks, run_task
552 from rhodecode.lib.celerylib import tasks, run_task
553 return run_task(tasks.create_repo, form_data, cur_user)
553 return run_task(tasks.create_repo, form_data, cur_user)
554
554
555 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
555 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
556 perm_deletions=None, check_perms=True,
556 perm_deletions=None, check_perms=True,
557 cur_user=None):
557 cur_user=None):
558 if not perm_additions:
558 if not perm_additions:
559 perm_additions = []
559 perm_additions = []
560 if not perm_updates:
560 if not perm_updates:
561 perm_updates = []
561 perm_updates = []
562 if not perm_deletions:
562 if not perm_deletions:
563 perm_deletions = []
563 perm_deletions = []
564
564
565 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
565 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
566
566
567 # update permissions
567 # update permissions
568 for member_id, perm, member_type in perm_updates:
568 for member_id, perm, member_type in perm_updates:
569 member_id = int(member_id)
569 member_id = int(member_id)
570 if member_type == 'user':
570 if member_type == 'user':
571 # this updates also current one if found
571 # this updates also current one if found
572 self.grant_user_permission(
572 self.grant_user_permission(
573 repo=repo, user=member_id, perm=perm)
573 repo=repo, user=member_id, perm=perm)
574 else: # set for user group
574 else: # set for user group
575 # check if we have permissions to alter this usergroup
575 # check if we have permissions to alter this usergroup
576 member_name = UserGroup.get(member_id).users_group_name
576 member_name = UserGroup.get(member_id).users_group_name
577 if not check_perms or HasUserGroupPermissionAny(
577 if not check_perms or HasUserGroupPermissionAny(
578 *req_perms)(member_name, user=cur_user):
578 *req_perms)(member_name, user=cur_user):
579 self.grant_user_group_permission(
579 self.grant_user_group_permission(
580 repo=repo, group_name=member_id, perm=perm)
580 repo=repo, group_name=member_id, perm=perm)
581
581
582 # set new permissions
582 # set new permissions
583 for member_id, perm, member_type in perm_additions:
583 for member_id, perm, member_type in perm_additions:
584 member_id = int(member_id)
584 member_id = int(member_id)
585 if member_type == 'user':
585 if member_type == 'user':
586 self.grant_user_permission(
586 self.grant_user_permission(
587 repo=repo, user=member_id, perm=perm)
587 repo=repo, user=member_id, perm=perm)
588 else: # set for user group
588 else: # set for user group
589 # check if we have permissions to alter this usergroup
589 # check if we have permissions to alter this usergroup
590 member_name = UserGroup.get(member_id).users_group_name
590 member_name = UserGroup.get(member_id).users_group_name
591 if not check_perms or HasUserGroupPermissionAny(
591 if not check_perms or HasUserGroupPermissionAny(
592 *req_perms)(member_name, user=cur_user):
592 *req_perms)(member_name, user=cur_user):
593 self.grant_user_group_permission(
593 self.grant_user_group_permission(
594 repo=repo, group_name=member_id, perm=perm)
594 repo=repo, group_name=member_id, perm=perm)
595
595
596 # delete permissions
596 # delete permissions
597 for member_id, perm, member_type in perm_deletions:
597 for member_id, perm, member_type in perm_deletions:
598 member_id = int(member_id)
598 member_id = int(member_id)
599 if member_type == 'user':
599 if member_type == 'user':
600 self.revoke_user_permission(repo=repo, user=member_id)
600 self.revoke_user_permission(repo=repo, user=member_id)
601 else: # set for user group
601 else: # set for user group
602 # check if we have permissions to alter this usergroup
602 # check if we have permissions to alter this usergroup
603 member_name = UserGroup.get(member_id).users_group_name
603 member_name = UserGroup.get(member_id).users_group_name
604 if not check_perms or HasUserGroupPermissionAny(
604 if not check_perms or HasUserGroupPermissionAny(
605 *req_perms)(member_name, user=cur_user):
605 *req_perms)(member_name, user=cur_user):
606 self.revoke_user_group_permission(
606 self.revoke_user_group_permission(
607 repo=repo, group_name=member_id)
607 repo=repo, group_name=member_id)
608
608
609 def create_fork(self, form_data, cur_user):
609 def create_fork(self, form_data, cur_user):
610 """
610 """
611 Simple wrapper into executing celery task for fork creation
611 Simple wrapper into executing celery task for fork creation
612
612
613 :param form_data:
613 :param form_data:
614 :param cur_user:
614 :param cur_user:
615 """
615 """
616 from rhodecode.lib.celerylib import tasks, run_task
616 from rhodecode.lib.celerylib import tasks, run_task
617 return run_task(tasks.create_repo_fork, form_data, cur_user)
617 return run_task(tasks.create_repo_fork, form_data, cur_user)
618
618
619 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
619 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
620 """
620 """
621 Delete given repository, forks parameter defines what do do with
621 Delete given repository, forks parameter defines what do do with
622 attached forks. Throws AttachedForksError if deleted repo has attached
622 attached forks. Throws AttachedForksError if deleted repo has attached
623 forks
623 forks
624
624
625 :param repo:
625 :param repo:
626 :param forks: str 'delete' or 'detach'
626 :param forks: str 'delete' or 'detach'
627 :param fs_remove: remove(archive) repo from filesystem
627 :param fs_remove: remove(archive) repo from filesystem
628 """
628 """
629 if not cur_user:
629 if not cur_user:
630 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
630 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
631 repo = self._get_repo(repo)
631 repo = self._get_repo(repo)
632 if repo:
632 if repo:
633 if forks == 'detach':
633 if forks == 'detach':
634 for r in repo.forks:
634 for r in repo.forks:
635 r.fork = None
635 r.fork = None
636 self.sa.add(r)
636 self.sa.add(r)
637 elif forks == 'delete':
637 elif forks == 'delete':
638 for r in repo.forks:
638 for r in repo.forks:
639 self.delete(r, forks='delete')
639 self.delete(r, forks='delete')
640 elif [f for f in repo.forks]:
640 elif [f for f in repo.forks]:
641 raise AttachedForksError()
641 raise AttachedForksError()
642
642
643 old_repo_dict = repo.get_dict()
643 old_repo_dict = repo.get_dict()
644 events.trigger(events.RepoPreDeleteEvent(repo))
644 events.trigger(events.RepoPreDeleteEvent(repo))
645 try:
645 try:
646 self.sa.delete(repo)
646 self.sa.delete(repo)
647 if fs_remove:
647 if fs_remove:
648 self._delete_filesystem_repo(repo)
648 self._delete_filesystem_repo(repo)
649 else:
649 else:
650 log.debug('skipping removal from filesystem')
650 log.debug('skipping removal from filesystem')
651 old_repo_dict.update({
651 old_repo_dict.update({
652 'deleted_by': cur_user,
652 'deleted_by': cur_user,
653 'deleted_on': time.time(),
653 'deleted_on': time.time(),
654 })
654 })
655 log_delete_repository(**old_repo_dict)
655 log_delete_repository(**old_repo_dict)
656 events.trigger(events.RepoDeletedEvent(repo))
656 events.trigger(events.RepoDeletedEvent(repo))
657 except Exception:
657 except Exception:
658 log.error(traceback.format_exc())
658 log.error(traceback.format_exc())
659 raise
659 raise
660
660
661 def grant_user_permission(self, repo, user, perm):
661 def grant_user_permission(self, repo, user, perm):
662 """
662 """
663 Grant permission for user on given repository, or update existing one
663 Grant permission for user on given repository, or update existing one
664 if found
664 if found
665
665
666 :param repo: Instance of Repository, repository_id, or repository name
666 :param repo: Instance of Repository, repository_id, or repository name
667 :param user: Instance of User, user_id or username
667 :param user: Instance of User, user_id or username
668 :param perm: Instance of Permission, or permission_name
668 :param perm: Instance of Permission, or permission_name
669 """
669 """
670 user = self._get_user(user)
670 user = self._get_user(user)
671 repo = self._get_repo(repo)
671 repo = self._get_repo(repo)
672 permission = self._get_perm(perm)
672 permission = self._get_perm(perm)
673
673
674 # check if we have that permission already
674 # check if we have that permission already
675 obj = self.sa.query(UserRepoToPerm) \
675 obj = self.sa.query(UserRepoToPerm) \
676 .filter(UserRepoToPerm.user == user) \
676 .filter(UserRepoToPerm.user == user) \
677 .filter(UserRepoToPerm.repository == repo) \
677 .filter(UserRepoToPerm.repository == repo) \
678 .scalar()
678 .scalar()
679 if obj is None:
679 if obj is None:
680 # create new !
680 # create new !
681 obj = UserRepoToPerm()
681 obj = UserRepoToPerm()
682 obj.repository = repo
682 obj.repository = repo
683 obj.user = user
683 obj.user = user
684 obj.permission = permission
684 obj.permission = permission
685 self.sa.add(obj)
685 self.sa.add(obj)
686 log.debug('Granted perm %s to %s on %s', perm, user, repo)
686 log.debug('Granted perm %s to %s on %s', perm, user, repo)
687 action_logger_generic(
687 action_logger_generic(
688 'granted permission: {} to user: {} on repo: {}'.format(
688 'granted permission: {} to user: {} on repo: {}'.format(
689 perm, user, repo), namespace='security.repo')
689 perm, user, repo), namespace='security.repo')
690 return obj
690 return obj
691
691
692 def revoke_user_permission(self, repo, user):
692 def revoke_user_permission(self, repo, user):
693 """
693 """
694 Revoke permission for user on given repository
694 Revoke permission for user on given repository
695
695
696 :param repo: Instance of Repository, repository_id, or repository name
696 :param repo: Instance of Repository, repository_id, or repository name
697 :param user: Instance of User, user_id or username
697 :param user: Instance of User, user_id or username
698 """
698 """
699
699
700 user = self._get_user(user)
700 user = self._get_user(user)
701 repo = self._get_repo(repo)
701 repo = self._get_repo(repo)
702
702
703 obj = self.sa.query(UserRepoToPerm) \
703 obj = self.sa.query(UserRepoToPerm) \
704 .filter(UserRepoToPerm.repository == repo) \
704 .filter(UserRepoToPerm.repository == repo) \
705 .filter(UserRepoToPerm.user == user) \
705 .filter(UserRepoToPerm.user == user) \
706 .scalar()
706 .scalar()
707 if obj:
707 if obj:
708 self.sa.delete(obj)
708 self.sa.delete(obj)
709 log.debug('Revoked perm on %s on %s', repo, user)
709 log.debug('Revoked perm on %s on %s', repo, user)
710 action_logger_generic(
710 action_logger_generic(
711 'revoked permission from user: {} on repo: {}'.format(
711 'revoked permission from user: {} on repo: {}'.format(
712 user, repo), namespace='security.repo')
712 user, repo), namespace='security.repo')
713
713
714 def grant_user_group_permission(self, repo, group_name, perm):
714 def grant_user_group_permission(self, repo, group_name, perm):
715 """
715 """
716 Grant permission for user group on given repository, or update
716 Grant permission for user group on given repository, or update
717 existing one if found
717 existing one if found
718
718
719 :param repo: Instance of Repository, repository_id, or repository name
719 :param repo: Instance of Repository, repository_id, or repository name
720 :param group_name: Instance of UserGroup, users_group_id,
720 :param group_name: Instance of UserGroup, users_group_id,
721 or user group name
721 or user group name
722 :param perm: Instance of Permission, or permission_name
722 :param perm: Instance of Permission, or permission_name
723 """
723 """
724 repo = self._get_repo(repo)
724 repo = self._get_repo(repo)
725 group_name = self._get_user_group(group_name)
725 group_name = self._get_user_group(group_name)
726 permission = self._get_perm(perm)
726 permission = self._get_perm(perm)
727
727
728 # check if we have that permission already
728 # check if we have that permission already
729 obj = self.sa.query(UserGroupRepoToPerm) \
729 obj = self.sa.query(UserGroupRepoToPerm) \
730 .filter(UserGroupRepoToPerm.users_group == group_name) \
730 .filter(UserGroupRepoToPerm.users_group == group_name) \
731 .filter(UserGroupRepoToPerm.repository == repo) \
731 .filter(UserGroupRepoToPerm.repository == repo) \
732 .scalar()
732 .scalar()
733
733
734 if obj is None:
734 if obj is None:
735 # create new
735 # create new
736 obj = UserGroupRepoToPerm()
736 obj = UserGroupRepoToPerm()
737
737
738 obj.repository = repo
738 obj.repository = repo
739 obj.users_group = group_name
739 obj.users_group = group_name
740 obj.permission = permission
740 obj.permission = permission
741 self.sa.add(obj)
741 self.sa.add(obj)
742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
743 action_logger_generic(
743 action_logger_generic(
744 'granted permission: {} to usergroup: {} on repo: {}'.format(
744 'granted permission: {} to usergroup: {} on repo: {}'.format(
745 perm, group_name, repo), namespace='security.repo')
745 perm, group_name, repo), namespace='security.repo')
746
746
747 return obj
747 return obj
748
748
749 def revoke_user_group_permission(self, repo, group_name):
749 def revoke_user_group_permission(self, repo, group_name):
750 """
750 """
751 Revoke permission for user group on given repository
751 Revoke permission for user group on given repository
752
752
753 :param repo: Instance of Repository, repository_id, or repository name
753 :param repo: Instance of Repository, repository_id, or repository name
754 :param group_name: Instance of UserGroup, users_group_id,
754 :param group_name: Instance of UserGroup, users_group_id,
755 or user group name
755 or user group name
756 """
756 """
757 repo = self._get_repo(repo)
757 repo = self._get_repo(repo)
758 group_name = self._get_user_group(group_name)
758 group_name = self._get_user_group(group_name)
759
759
760 obj = self.sa.query(UserGroupRepoToPerm) \
760 obj = self.sa.query(UserGroupRepoToPerm) \
761 .filter(UserGroupRepoToPerm.repository == repo) \
761 .filter(UserGroupRepoToPerm.repository == repo) \
762 .filter(UserGroupRepoToPerm.users_group == group_name) \
762 .filter(UserGroupRepoToPerm.users_group == group_name) \
763 .scalar()
763 .scalar()
764 if obj:
764 if obj:
765 self.sa.delete(obj)
765 self.sa.delete(obj)
766 log.debug('Revoked perm to %s on %s', repo, group_name)
766 log.debug('Revoked perm to %s on %s', repo, group_name)
767 action_logger_generic(
767 action_logger_generic(
768 'revoked permission from usergroup: {} on repo: {}'.format(
768 'revoked permission from usergroup: {} on repo: {}'.format(
769 group_name, repo), namespace='security.repo')
769 group_name, repo), namespace='security.repo')
770
770
771 def delete_stats(self, repo_name):
771 def delete_stats(self, repo_name):
772 """
772 """
773 removes stats for given repo
773 removes stats for given repo
774
774
775 :param repo_name:
775 :param repo_name:
776 """
776 """
777 repo = self._get_repo(repo_name)
777 repo = self._get_repo(repo_name)
778 try:
778 try:
779 obj = self.sa.query(Statistics) \
779 obj = self.sa.query(Statistics) \
780 .filter(Statistics.repository == repo).scalar()
780 .filter(Statistics.repository == repo).scalar()
781 if obj:
781 if obj:
782 self.sa.delete(obj)
782 self.sa.delete(obj)
783 except Exception:
783 except Exception:
784 log.error(traceback.format_exc())
784 log.error(traceback.format_exc())
785 raise
785 raise
786
786
787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
788 field_type='str', field_desc=''):
788 field_type='str', field_desc=''):
789
789
790 repo = self._get_repo(repo_name)
790 repo = self._get_repo(repo_name)
791
791
792 new_field = RepositoryField()
792 new_field = RepositoryField()
793 new_field.repository = repo
793 new_field.repository = repo
794 new_field.field_key = field_key
794 new_field.field_key = field_key
795 new_field.field_type = field_type # python type
795 new_field.field_type = field_type # python type
796 new_field.field_value = field_value
796 new_field.field_value = field_value
797 new_field.field_desc = field_desc
797 new_field.field_desc = field_desc
798 new_field.field_label = field_label
798 new_field.field_label = field_label
799 self.sa.add(new_field)
799 self.sa.add(new_field)
800 return new_field
800 return new_field
801
801
802 def delete_repo_field(self, repo_name, field_key):
802 def delete_repo_field(self, repo_name, field_key):
803 repo = self._get_repo(repo_name)
803 repo = self._get_repo(repo_name)
804 field = RepositoryField.get_by_key_name(field_key, repo)
804 field = RepositoryField.get_by_key_name(field_key, repo)
805 if field:
805 if field:
806 self.sa.delete(field)
806 self.sa.delete(field)
807
807
808 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
808 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
809 clone_uri=None, repo_store_location=None,
809 clone_uri=None, repo_store_location=None,
810 use_global_config=False):
810 use_global_config=False):
811 """
811 """
812 makes repository on filesystem. It's group aware means it'll create
812 makes repository on filesystem. It's group aware means it'll create
813 a repository within a group, and alter the paths accordingly of
813 a repository within a group, and alter the paths accordingly of
814 group location
814 group location
815
815
816 :param repo_name:
816 :param repo_name:
817 :param alias:
817 :param alias:
818 :param parent:
818 :param parent:
819 :param clone_uri:
819 :param clone_uri:
820 :param repo_store_location:
820 :param repo_store_location:
821 """
821 """
822 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
822 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
823 from rhodecode.model.scm import ScmModel
823 from rhodecode.model.scm import ScmModel
824
824
825 if Repository.NAME_SEP in repo_name:
825 if Repository.NAME_SEP in repo_name:
826 raise ValueError(
826 raise ValueError(
827 'repo_name must not contain groups got `%s`' % repo_name)
827 'repo_name must not contain groups got `%s`' % repo_name)
828
828
829 if isinstance(repo_group, RepoGroup):
829 if isinstance(repo_group, RepoGroup):
830 new_parent_path = os.sep.join(repo_group.full_path_splitted)
830 new_parent_path = os.sep.join(repo_group.full_path_splitted)
831 else:
831 else:
832 new_parent_path = repo_group or ''
832 new_parent_path = repo_group or ''
833
833
834 if repo_store_location:
834 if repo_store_location:
835 _paths = [repo_store_location]
835 _paths = [repo_store_location]
836 else:
836 else:
837 _paths = [self.repos_path, new_parent_path, repo_name]
837 _paths = [self.repos_path, new_parent_path, repo_name]
838 # we need to make it str for mercurial
838 # we need to make it str for mercurial
839 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
839 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
840
840
841 # check if this path is not a repository
841 # check if this path is not a repository
842 if is_valid_repo(repo_path, self.repos_path):
842 if is_valid_repo(repo_path, self.repos_path):
843 raise Exception('This path %s is a valid repository' % repo_path)
843 raise Exception('This path %s is a valid repository' % repo_path)
844
844
845 # check if this path is a group
845 # check if this path is a group
846 if is_valid_repo_group(repo_path, self.repos_path):
846 if is_valid_repo_group(repo_path, self.repos_path):
847 raise Exception('This path %s is a valid group' % repo_path)
847 raise Exception('This path %s is a valid group' % repo_path)
848
848
849 log.info('creating repo %s in %s from url: `%s`',
849 log.info('creating repo %s in %s from url: `%s`',
850 repo_name, safe_unicode(repo_path),
850 repo_name, safe_unicode(repo_path),
851 obfuscate_url_pw(clone_uri))
851 obfuscate_url_pw(clone_uri))
852
852
853 backend = get_backend(repo_type)
853 backend = get_backend(repo_type)
854
854
855 config_repo = None if use_global_config else repo_name
855 config_repo = None if use_global_config else repo_name
856 if config_repo and new_parent_path:
856 if config_repo and new_parent_path:
857 config_repo = Repository.NAME_SEP.join(
857 config_repo = Repository.NAME_SEP.join(
858 (new_parent_path, config_repo))
858 (new_parent_path, config_repo))
859 config = make_db_config(clear_session=False, repo=config_repo)
859 config = make_db_config(clear_session=False, repo=config_repo)
860 config.set('extensions', 'largefiles', '')
860 config.set('extensions', 'largefiles', '')
861
861
862 # patch and reset hooks section of UI config to not run any
862 # patch and reset hooks section of UI config to not run any
863 # hooks on creating remote repo
863 # hooks on creating remote repo
864 config.clear_section('hooks')
864 config.clear_section('hooks')
865
865
866 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
866 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
867 if repo_type == 'git':
867 if repo_type == 'git':
868 repo = backend(
868 repo = backend(
869 repo_path, config=config, create=True, src_url=clone_uri,
869 repo_path, config=config, create=True, src_url=clone_uri,
870 bare=True)
870 bare=True)
871 else:
871 else:
872 repo = backend(
872 repo = backend(
873 repo_path, config=config, create=True, src_url=clone_uri)
873 repo_path, config=config, create=True, src_url=clone_uri)
874
874
875 ScmModel().install_hooks(repo, repo_type=repo_type)
875 ScmModel().install_hooks(repo, repo_type=repo_type)
876
876
877 log.debug('Created repo %s with %s backend',
877 log.debug('Created repo %s with %s backend',
878 safe_unicode(repo_name), safe_unicode(repo_type))
878 safe_unicode(repo_name), safe_unicode(repo_type))
879 return repo
879 return repo
880
880
881 def _rename_filesystem_repo(self, old, new):
881 def _rename_filesystem_repo(self, old, new):
882 """
882 """
883 renames repository on filesystem
883 renames repository on filesystem
884
884
885 :param old: old name
885 :param old: old name
886 :param new: new name
886 :param new: new name
887 """
887 """
888 log.info('renaming repo from %s to %s', old, new)
888 log.info('renaming repo from %s to %s', old, new)
889
889
890 old_path = os.path.join(self.repos_path, old)
890 old_path = os.path.join(self.repos_path, old)
891 new_path = os.path.join(self.repos_path, new)
891 new_path = os.path.join(self.repos_path, new)
892 if os.path.isdir(new_path):
892 if os.path.isdir(new_path):
893 raise Exception(
893 raise Exception(
894 'Was trying to rename to already existing dir %s' % new_path
894 'Was trying to rename to already existing dir %s' % new_path
895 )
895 )
896 shutil.move(old_path, new_path)
896 shutil.move(old_path, new_path)
897
897
898 def _delete_filesystem_repo(self, repo):
898 def _delete_filesystem_repo(self, repo):
899 """
899 """
900 removes repo from filesystem, the removal is acctually made by
900 removes repo from filesystem, the removal is acctually made by
901 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
901 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
902 repository is no longer valid for rhodecode, can be undeleted later on
902 repository is no longer valid for rhodecode, can be undeleted later on
903 by reverting the renames on this repository
903 by reverting the renames on this repository
904
904
905 :param repo: repo object
905 :param repo: repo object
906 """
906 """
907 rm_path = os.path.join(self.repos_path, repo.repo_name)
907 rm_path = os.path.join(self.repos_path, repo.repo_name)
908 repo_group = repo.group
908 repo_group = repo.group
909 log.info("Removing repository %s", rm_path)
909 log.info("Removing repository %s", rm_path)
910 # disable hg/git internal that it doesn't get detected as repo
910 # disable hg/git internal that it doesn't get detected as repo
911 alias = repo.repo_type
911 alias = repo.repo_type
912
912
913 config = make_db_config(clear_session=False)
913 config = make_db_config(clear_session=False)
914 config.set('extensions', 'largefiles', '')
914 config.set('extensions', 'largefiles', '')
915 bare = getattr(repo.scm_instance(config=config), 'bare', False)
915 bare = getattr(repo.scm_instance(config=config), 'bare', False)
916
916
917 # skip this for bare git repos
917 # skip this for bare git repos
918 if not bare:
918 if not bare:
919 # disable VCS repo
919 # disable VCS repo
920 vcs_path = os.path.join(rm_path, '.%s' % alias)
920 vcs_path = os.path.join(rm_path, '.%s' % alias)
921 if os.path.exists(vcs_path):
921 if os.path.exists(vcs_path):
922 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
922 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
923
923
924 _now = datetime.now()
924 _now = datetime.now()
925 _ms = str(_now.microsecond).rjust(6, '0')
925 _ms = str(_now.microsecond).rjust(6, '0')
926 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
926 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
927 repo.just_name)
927 repo.just_name)
928 if repo_group:
928 if repo_group:
929 # if repository is in group, prefix the removal path with the group
929 # if repository is in group, prefix the removal path with the group
930 args = repo_group.full_path_splitted + [_d]
930 args = repo_group.full_path_splitted + [_d]
931 _d = os.path.join(*args)
931 _d = os.path.join(*args)
932
932
933 if os.path.isdir(rm_path):
933 if os.path.isdir(rm_path):
934 shutil.move(rm_path, os.path.join(self.repos_path, _d))
934 shutil.move(rm_path, os.path.join(self.repos_path, _d))
@@ -1,61 +1,78 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests.events.conftest import EventCatcher
23 from rhodecode.tests.events.conftest import EventCatcher
24
24
25 from rhodecode.model.pull_request import PullRequestModel
25 from rhodecode.model.pull_request import PullRequestModel
26 from rhodecode.events import (
26 from rhodecode.events import (
27 PullRequestCreateEvent,
27 PullRequestCreateEvent,
28 PullRequestUpdateEvent,
28 PullRequestUpdateEvent,
29 PullRequestReviewEvent,
29 PullRequestReviewEvent,
30 PullRequestMergeEvent,
30 PullRequestMergeEvent,
31 PullRequestCloseEvent,
31 PullRequestCloseEvent,
32 )
32 )
33
33
34 # TODO: dan: make the serialization tests complete json comparisons
35 @pytest.mark.backends("git", "hg")
36 @pytest.mark.parametrize('EventClass', [
37 PullRequestCreateEvent,
38 PullRequestUpdateEvent,
39 PullRequestReviewEvent,
40 PullRequestMergeEvent,
41 PullRequestCloseEvent,
42 ])
43 def test_pullrequest_events_serialized(pr_util, EventClass):
44 pr = pr_util.create_pull_request()
45 event = EventClass(pr)
46 data = event.as_dict()
47 assert data['name'] == EventClass.name
48 assert data['repo']['repo_name'] == pr.target_repo.repo_name
49 assert data['pullrequest']['pull_request_id'] == pr.pull_request_id
50
34
51
35 @pytest.mark.backends("git", "hg")
52 @pytest.mark.backends("git", "hg")
36 def test_create_pull_request_events(pr_util):
53 def test_create_pull_request_events(pr_util):
37 with EventCatcher() as event_catcher:
54 with EventCatcher() as event_catcher:
38 pr_util.create_pull_request()
55 pr_util.create_pull_request()
39
56
40 assert PullRequestCreateEvent in event_catcher.events_types
57 assert PullRequestCreateEvent in event_catcher.events_types
41
58
42
59
43 @pytest.mark.backends("git", "hg")
60 @pytest.mark.backends("git", "hg")
44 def test_close_pull_request_events(pr_util, user_admin):
61 def test_close_pull_request_events(pr_util, user_admin):
45 pr = pr_util.create_pull_request()
62 pr = pr_util.create_pull_request()
46
63
47 with EventCatcher() as event_catcher:
64 with EventCatcher() as event_catcher:
48 PullRequestModel().close_pull_request(pr, user_admin)
65 PullRequestModel().close_pull_request(pr, user_admin)
49
66
50 assert PullRequestCloseEvent in event_catcher.events_types
67 assert PullRequestCloseEvent in event_catcher.events_types
51
68
52
69
53 @pytest.mark.backends("git", "hg")
70 @pytest.mark.backends("git", "hg")
54 def test_close_pull_request_with_comment_events(pr_util, user_admin):
71 def test_close_pull_request_with_comment_events(pr_util, user_admin):
55 pr = pr_util.create_pull_request()
72 pr = pr_util.create_pull_request()
56
73
57 with EventCatcher() as event_catcher:
74 with EventCatcher() as event_catcher:
58 PullRequestModel().close_pull_request_with_comment(
75 PullRequestModel().close_pull_request_with_comment(
59 pr, user_admin, pr.target_repo)
76 pr, user_admin, pr.target_repo)
60
77
61 assert PullRequestCloseEvent in event_catcher.events_types
78 assert PullRequestCloseEvent in event_catcher.events_types
@@ -1,79 +1,113 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests.events.conftest import EventCatcher
23 from rhodecode.tests.events.conftest import EventCatcher
24
24
25 from rhodecode.lib import hooks_base, utils2
25 from rhodecode.lib import hooks_base, utils2
26 from rhodecode.model.repo import RepoModel
26 from rhodecode.model.repo import RepoModel
27 from rhodecode.events.repo import (
27 from rhodecode.events.repo import (
28 RepoPrePullEvent, RepoPullEvent,
28 RepoPrePullEvent, RepoPullEvent,
29 RepoPrePushEvent, RepoPushEvent,
29 RepoPrePushEvent, RepoPushEvent,
30 RepoPreCreateEvent, RepoCreatedEvent,
30 RepoPreCreateEvent, RepoCreatedEvent,
31 RepoPreDeleteEvent, RepoDeletedEvent,
31 RepoPreDeleteEvent, RepoDeletedEvent,
32 )
32 )
33
33
34
34
@pytest.fixture
def scm_extras(user_regular, repo_stub):
    """Build the fake VCS hook "extras" payload the repo hooks receive."""
    hook_payload = {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'action': '',
        'repository': repo_stub.repo_name,
        'scm': repo_stub.scm_instance().alias,
        'config': '',
        'server_url': 'http://example.com',
        'make_lock': None,
        'locked_by': [None],
        'commit_ids': ['a' * 40] * 3,
    }
    return utils2.AttributeDict(hook_payload)
50
50
51
51
# TODO: dan: make the serialization tests complete json comparisons
@pytest.mark.parametrize('EventClass', [
    RepoPreCreateEvent, RepoCreatedEvent,
    RepoPreDeleteEvent, RepoDeletedEvent,
])
def test_repo_events_serialized(repo_stub, EventClass):
    """Repo lifecycle events expose their name and repo data when serialized."""
    serialized = EventClass(repo_stub).as_dict()
    assert serialized['name'] == EventClass.name
    assert serialized['repo']['repo_name'] == repo_stub.repo_name
62
63
@pytest.mark.parametrize('EventClass', [
    RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
])
def test_vcs_repo_pull_push_events_serialize(repo_stub, scm_extras, EventClass):
    """Pull and pre-push VCS events serialize their name and repo data.

    Renamed from ``test_vcs_repo_events_serialize``: a second function with
    the exact same name defined just below redefined this one at import
    time, so pytest silently never collected or ran this test.
    """
    event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
    data = event.as_dict()
    assert data['name'] == EventClass.name
    assert data['repo']['repo_name'] == repo_stub.repo_name
72
73
74
@pytest.mark.parametrize('EventClass', [RepoPushEvent])
def test_vcs_repo_push_event_serialize(repo_stub, scm_extras, EventClass):
    """RepoPushEvent serializes its name and repo data.

    Renamed from ``test_vcs_repo_events_serialize`` so it no longer shares
    a name with (and thereby shadows) the pull/pre-push serialization test
    defined with the identical name above.
    """
    event = EventClass(repo_name=repo_stub.repo_name,
                       pushed_commit_ids=scm_extras['commit_ids'],
                       extras=scm_extras)
    data = event.as_dict()
    assert data['name'] == EventClass.name
    assert data['repo']['repo_name'] == repo_stub.repo_name
83
84
def test_create_delete_repo_fires_events(backend):
    """Repo creation and deletion each fire their pre/post event pair in order."""
    with EventCatcher() as catcher:
        repo = backend.create_repo()
    assert catcher.events_types == [RepoPreCreateEvent, RepoCreatedEvent]

    with EventCatcher() as catcher:
        RepoModel().delete(repo)
    assert catcher.events_types == [RepoPreDeleteEvent, RepoDeletedEvent]
60
93
61
94
def test_push_fires_events(scm_extras):
    """Push hooks fire the pre/post push events.

    Renamed from ``test_pull_fires_events``: the old name contradicted the
    push hooks (``pre_push``/``post_push``) the body actually exercises.
    """
    with EventCatcher() as catcher:
        hooks_base.pre_push(scm_extras)
    assert catcher.events_types == [RepoPrePushEvent]

    with EventCatcher() as catcher:
        hooks_base.post_push(scm_extras)
    assert catcher.events_types == [RepoPushEvent]


def test_pull_fires_events(scm_extras):
    """Pull hooks fire the pre/post pull events.

    Renamed from ``test_push_fires_events``: the old name contradicted the
    pull hooks (``pre_pull``/``post_pull``) the body actually exercises.
    """
    with EventCatcher() as catcher:
        hooks_base.pre_pull(scm_extras)
    assert catcher.events_types == [RepoPrePullEvent]

    with EventCatcher() as catcher:
        hooks_base.post_pull(scm_extras)
    assert catcher.events_types == [RepoPullEvent]
113
General Comments 0
You need to be logged in to leave comments. Login now