@@ -1,568 +1,576 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import itertools |
|
21 | import itertools | |
22 | import logging |
|
22 | import logging | |
23 | import sys |
|
23 | import sys | |
24 | import types |
|
24 | import types | |
25 | import fnmatch |
|
25 | import fnmatch | |
26 |
|
26 | |||
27 | import decorator |
|
27 | import decorator | |
28 | import venusian |
|
28 | import venusian | |
29 | from collections import OrderedDict |
|
29 | from collections import OrderedDict | |
30 |
|
30 | |||
31 | from pyramid.exceptions import ConfigurationError |
|
31 | from pyramid.exceptions import ConfigurationError | |
32 | from pyramid.renderers import render |
|
32 | from pyramid.renderers import render | |
33 | from pyramid.response import Response |
|
33 | from pyramid.response import Response | |
34 | from pyramid.httpexceptions import HTTPNotFound |
|
34 | from pyramid.httpexceptions import HTTPNotFound | |
35 |
|
35 | |||
36 | from rhodecode.api.exc import ( |
|
36 | from rhodecode.api.exc import ( | |
37 | JSONRPCBaseError, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
37 | JSONRPCBaseError, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) | |
38 | from rhodecode.apps._base import TemplateArgs |
|
38 | from rhodecode.apps._base import TemplateArgs | |
39 | from rhodecode.lib.auth import AuthUser |
|
39 | from rhodecode.lib.auth import AuthUser | |
40 | from rhodecode.lib.base import get_ip_addr, attach_context_attributes |
|
40 | from rhodecode.lib.base import get_ip_addr, attach_context_attributes | |
41 | from rhodecode.lib.exc_tracking import store_exception |
|
41 | from rhodecode.lib.exc_tracking import store_exception | |
42 | from rhodecode.lib.ext_json import json |
|
42 | from rhodecode.lib.ext_json import json | |
43 | from rhodecode.lib.utils2 import safe_str |
|
43 | from rhodecode.lib.utils2 import safe_str | |
44 | from rhodecode.lib.plugins.utils import get_plugin_settings |
|
44 | from rhodecode.lib.plugins.utils import get_plugin_settings | |
45 | from rhodecode.model.db import User, UserApiKeys |
|
45 | from rhodecode.model.db import User, UserApiKeys | |
46 |
|
46 | |||
47 | log = logging.getLogger(__name__) |
|
47 | log = logging.getLogger(__name__) | |
48 |
|
48 | |||
49 | DEFAULT_RENDERER = 'jsonrpc_renderer' |
|
49 | DEFAULT_RENDERER = 'jsonrpc_renderer' | |
50 | DEFAULT_URL = '/_admin/apiv2' |
|
50 | DEFAULT_URL = '/_admin/apiv2' | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | def find_methods(jsonrpc_methods, pattern): |
|
53 | def find_methods(jsonrpc_methods, pattern): | |
54 | matches = OrderedDict() |
|
54 | matches = OrderedDict() | |
55 | if not isinstance(pattern, (list, tuple)): |
|
55 | if not isinstance(pattern, (list, tuple)): | |
56 | pattern = [pattern] |
|
56 | pattern = [pattern] | |
57 |
|
57 | |||
58 | for single_pattern in pattern: |
|
58 | for single_pattern in pattern: | |
59 | for method_name, method in jsonrpc_methods.items(): |
|
59 | for method_name, method in jsonrpc_methods.items(): | |
60 | if fnmatch.fnmatch(method_name, single_pattern): |
|
60 | if fnmatch.fnmatch(method_name, single_pattern): | |
61 | matches[method_name] = method |
|
61 | matches[method_name] = method | |
62 | return matches |
|
62 | return matches | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | class ExtJsonRenderer(object): |
|
65 | class ExtJsonRenderer(object): | |
66 | """ |
|
66 | """ | |
67 | Custom renderer that makes use of our ext_json lib |
|
67 | Custom renderer that makes use of our ext_json lib | |
68 |
|
68 | |||
69 | """ |
|
69 | """ | |
70 |
|
70 | |||
71 | def __init__(self, serializer=json.dumps, **kw): |
|
71 | def __init__(self, serializer=json.dumps, **kw): | |
72 | """ Any keyword arguments will be passed to the ``serializer`` |
|
72 | """ Any keyword arguments will be passed to the ``serializer`` | |
73 | function.""" |
|
73 | function.""" | |
74 | self.serializer = serializer |
|
74 | self.serializer = serializer | |
75 | self.kw = kw |
|
75 | self.kw = kw | |
76 |
|
76 | |||
77 | def __call__(self, info): |
|
77 | def __call__(self, info): | |
78 | """ Returns a plain JSON-encoded string with content-type |
|
78 | """ Returns a plain JSON-encoded string with content-type | |
79 | ``application/json``. The content-type may be overridden by |
|
79 | ``application/json``. The content-type may be overridden by | |
80 | setting ``request.response.content_type``.""" |
|
80 | setting ``request.response.content_type``.""" | |
81 |
|
81 | |||
82 | def _render(value, system): |
|
82 | def _render(value, system): | |
83 | request = system.get('request') |
|
83 | request = system.get('request') | |
84 | if request is not None: |
|
84 | if request is not None: | |
85 | response = request.response |
|
85 | response = request.response | |
86 | ct = response.content_type |
|
86 | ct = response.content_type | |
87 | if ct == response.default_content_type: |
|
87 | if ct == response.default_content_type: | |
88 | response.content_type = 'application/json' |
|
88 | response.content_type = 'application/json' | |
89 |
|
89 | |||
90 | return self.serializer(value, **self.kw) |
|
90 | return self.serializer(value, **self.kw) | |
91 |
|
91 | |||
92 | return _render |
|
92 | return _render | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | def jsonrpc_response(request, result): |
|
95 | def jsonrpc_response(request, result): | |
96 | rpc_id = getattr(request, 'rpc_id', None) |
|
96 | rpc_id = getattr(request, 'rpc_id', None) | |
97 | response = request.response |
|
97 | response = request.response | |
98 |
|
98 | |||
99 | # store content_type before render is called |
|
99 | # store content_type before render is called | |
100 | ct = response.content_type |
|
100 | ct = response.content_type | |
101 |
|
101 | |||
102 | ret_value = '' |
|
102 | ret_value = '' | |
103 | if rpc_id: |
|
103 | if rpc_id: | |
104 | ret_value = { |
|
104 | ret_value = { | |
105 | 'id': rpc_id, |
|
105 | 'id': rpc_id, | |
106 | 'result': result, |
|
106 | 'result': result, | |
107 | 'error': None, |
|
107 | 'error': None, | |
108 | } |
|
108 | } | |
109 |
|
109 | |||
110 | # fetch deprecation warnings, and store them inside the result |
|
110 | # fetch deprecation warnings, and store them inside the result | |
111 | deprecation = getattr(request, 'rpc_deprecation', None) |
|
111 | deprecation = getattr(request, 'rpc_deprecation', None) | |
112 | if deprecation: |
|
112 | if deprecation: | |
113 | ret_value['DEPRECATION_WARNING'] = deprecation |
|
113 | ret_value['DEPRECATION_WARNING'] = deprecation | |
114 |
|
114 | |||
115 | raw_body = render(DEFAULT_RENDERER, ret_value, request=request) |
|
115 | raw_body = render(DEFAULT_RENDERER, ret_value, request=request) | |
116 | response.body = safe_str(raw_body, response.charset) |
|
116 | response.body = safe_str(raw_body, response.charset) | |
117 |
|
117 | |||
118 | if ct == response.default_content_type: |
|
118 | if ct == response.default_content_type: | |
119 | response.content_type = 'application/json' |
|
119 | response.content_type = 'application/json' | |
120 |
|
120 | |||
121 | return response |
|
121 | return response | |
122 |
|
122 | |||
123 |
|
123 | |||
124 | def jsonrpc_error(request, message, retid=None, code=None, headers=None): |
|
124 | def jsonrpc_error(request, message, retid=None, code=None, headers=None): | |
125 | """ |
|
125 | """ | |
126 | Generate a Response object with a JSON-RPC error body |
|
126 | Generate a Response object with a JSON-RPC error body | |
127 |
|
127 | |||
128 | :param code: |
|
128 | :param code: | |
129 | :param retid: |
|
129 | :param retid: | |
130 | :param message: |
|
130 | :param message: | |
131 | """ |
|
131 | """ | |
132 | err_dict = {'id': retid, 'result': None, 'error': message} |
|
132 | err_dict = {'id': retid, 'result': None, 'error': message} | |
133 | body = render(DEFAULT_RENDERER, err_dict, request=request).encode('utf-8') |
|
133 | body = render(DEFAULT_RENDERER, err_dict, request=request).encode('utf-8') | |
134 |
|
134 | |||
135 | return Response( |
|
135 | return Response( | |
136 | body=body, |
|
136 | body=body, | |
137 | status=code, |
|
137 | status=code, | |
138 | content_type='application/json', |
|
138 | content_type='application/json', | |
139 | headerlist=headers |
|
139 | headerlist=headers | |
140 | ) |
|
140 | ) | |
141 |
|
141 | |||
142 |
|
142 | |||
143 | def exception_view(exc, request): |
|
143 | def exception_view(exc, request): | |
144 | rpc_id = getattr(request, 'rpc_id', None) |
|
144 | rpc_id = getattr(request, 'rpc_id', None) | |
145 |
|
145 | |||
146 | if isinstance(exc, JSONRPCError): |
|
146 | if isinstance(exc, JSONRPCError): | |
147 | fault_message = safe_str(exc.message) |
|
147 | fault_message = safe_str(exc.message) | |
148 | log.debug('json-rpc error rpc_id:%s "%s"', rpc_id, fault_message) |
|
148 | log.debug('json-rpc error rpc_id:%s "%s"', rpc_id, fault_message) | |
149 | elif isinstance(exc, JSONRPCValidationError): |
|
149 | elif isinstance(exc, JSONRPCValidationError): | |
150 | colander_exc = exc.colander_exception |
|
150 | colander_exc = exc.colander_exception | |
151 | # TODO(marcink): think maybe of nicer way to serialize errors ? |
|
151 | # TODO(marcink): think maybe of nicer way to serialize errors ? | |
152 | fault_message = colander_exc.asdict() |
|
152 | fault_message = colander_exc.asdict() | |
153 | log.debug('json-rpc colander error rpc_id:%s "%s"', rpc_id, fault_message) |
|
153 | log.debug('json-rpc colander error rpc_id:%s "%s"', rpc_id, fault_message) | |
154 | elif isinstance(exc, JSONRPCForbidden): |
|
154 | elif isinstance(exc, JSONRPCForbidden): | |
155 | fault_message = 'Access was denied to this resource.' |
|
155 | fault_message = 'Access was denied to this resource.' | |
156 | log.warning('json-rpc forbidden call rpc_id:%s "%s"', rpc_id, fault_message) |
|
156 | log.warning('json-rpc forbidden call rpc_id:%s "%s"', rpc_id, fault_message) | |
157 | elif isinstance(exc, HTTPNotFound): |
|
157 | elif isinstance(exc, HTTPNotFound): | |
158 | method = request.rpc_method |
|
158 | method = request.rpc_method | |
159 | log.debug('json-rpc method `%s` not found in list of ' |
|
159 | log.debug('json-rpc method `%s` not found in list of ' | |
160 | 'api calls: %s, rpc_id:%s', |
|
160 | 'api calls: %s, rpc_id:%s', | |
161 | method, request.registry.jsonrpc_methods.keys(), rpc_id) |
|
161 | method, request.registry.jsonrpc_methods.keys(), rpc_id) | |
162 |
|
162 | |||
163 | similar = 'none' |
|
163 | similar = 'none' | |
164 | try: |
|
164 | try: | |
165 | similar_paterns = ['*{}*'.format(x) for x in method.split('_')] |
|
165 | similar_paterns = ['*{}*'.format(x) for x in method.split('_')] | |
166 | similar_found = find_methods( |
|
166 | similar_found = find_methods( | |
167 | request.registry.jsonrpc_methods, similar_paterns) |
|
167 | request.registry.jsonrpc_methods, similar_paterns) | |
168 | similar = ', '.join(similar_found.keys()) or similar |
|
168 | similar = ', '.join(similar_found.keys()) or similar | |
169 | except Exception: |
|
169 | except Exception: | |
170 | # make the whole above block safe |
|
170 | # make the whole above block safe | |
171 | pass |
|
171 | pass | |
172 |
|
172 | |||
173 | fault_message = "No such method: {}. Similar methods: {}".format( |
|
173 | fault_message = "No such method: {}. Similar methods: {}".format( | |
174 | method, similar) |
|
174 | method, similar) | |
175 | else: |
|
175 | else: | |
176 | fault_message = 'undefined error' |
|
176 | fault_message = 'undefined error' | |
177 | exc_info = exc.exc_info() |
|
177 | exc_info = exc.exc_info() | |
178 | store_exception(id(exc_info), exc_info, prefix='rhodecode-api') |
|
178 | store_exception(id(exc_info), exc_info, prefix='rhodecode-api') | |
179 |
|
179 | |||
180 | statsd = request.registry.statsd |
|
180 | statsd = request.registry.statsd | |
181 | if statsd: |
|
181 | if statsd: | |
182 | statsd.incr('rhodecode_exception', tags=["api"]) |
|
182 | statsd.incr('rhodecode_exception_total', tags=["exc_source:api"]) | |
183 |
|
183 | |||
184 | return jsonrpc_error(request, fault_message, rpc_id) |
|
184 | return jsonrpc_error(request, fault_message, rpc_id) | |
185 |
|
185 | |||
186 |
|
186 | |||
187 | def request_view(request): |
|
187 | def request_view(request): | |
188 | """ |
|
188 | """ | |
189 | Main request handling method. It handles all logic to call a specific |
|
189 | Main request handling method. It handles all logic to call a specific | |
190 | exposed method |
|
190 | exposed method | |
191 | """ |
|
191 | """ | |
192 | # cython compatible inspect |
|
192 | # cython compatible inspect | |
193 | from rhodecode.config.patches import inspect_getargspec |
|
193 | from rhodecode.config.patches import inspect_getargspec | |
194 | inspect = inspect_getargspec() |
|
194 | inspect = inspect_getargspec() | |
195 |
|
195 | |||
196 | # check if we can find this session using api_key, get_by_auth_token |
|
196 | # check if we can find this session using api_key, get_by_auth_token | |
197 | # search not expired tokens only |
|
197 | # search not expired tokens only | |
198 | try: |
|
198 | try: | |
199 | api_user = User.get_by_auth_token(request.rpc_api_key) |
|
199 | api_user = User.get_by_auth_token(request.rpc_api_key) | |
200 |
|
200 | |||
201 | if api_user is None: |
|
201 | if api_user is None: | |
202 | return jsonrpc_error( |
|
202 | return jsonrpc_error( | |
203 | request, retid=request.rpc_id, message='Invalid API KEY') |
|
203 | request, retid=request.rpc_id, message='Invalid API KEY') | |
204 |
|
204 | |||
205 | if not api_user.active: |
|
205 | if not api_user.active: | |
206 | return jsonrpc_error( |
|
206 | return jsonrpc_error( | |
207 | request, retid=request.rpc_id, |
|
207 | request, retid=request.rpc_id, | |
208 | message='Request from this user not allowed') |
|
208 | message='Request from this user not allowed') | |
209 |
|
209 | |||
210 | # check if we are allowed to use this IP |
|
210 | # check if we are allowed to use this IP | |
211 | auth_u = AuthUser( |
|
211 | auth_u = AuthUser( | |
212 | api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr) |
|
212 | api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr) | |
213 | if not auth_u.ip_allowed: |
|
213 | if not auth_u.ip_allowed: | |
214 | return jsonrpc_error( |
|
214 | return jsonrpc_error( | |
215 | request, retid=request.rpc_id, |
|
215 | request, retid=request.rpc_id, | |
216 | message='Request from IP:%s not allowed' % ( |
|
216 | message='Request from IP:%s not allowed' % ( | |
217 | request.rpc_ip_addr,)) |
|
217 | request.rpc_ip_addr,)) | |
218 | else: |
|
218 | else: | |
219 | log.info('Access for IP:%s allowed', request.rpc_ip_addr) |
|
219 | log.info('Access for IP:%s allowed', request.rpc_ip_addr) | |
220 |
|
220 | |||
221 | # register our auth-user |
|
221 | # register our auth-user | |
222 | request.rpc_user = auth_u |
|
222 | request.rpc_user = auth_u | |
223 | request.environ['rc_auth_user_id'] = auth_u.user_id |
|
223 | request.environ['rc_auth_user_id'] = auth_u.user_id | |
224 |
|
224 | |||
225 | # now check if token is valid for API |
|
225 | # now check if token is valid for API | |
226 | auth_token = request.rpc_api_key |
|
226 | auth_token = request.rpc_api_key | |
227 | token_match = api_user.authenticate_by_token( |
|
227 | token_match = api_user.authenticate_by_token( | |
228 | auth_token, roles=[UserApiKeys.ROLE_API]) |
|
228 | auth_token, roles=[UserApiKeys.ROLE_API]) | |
229 | invalid_token = not token_match |
|
229 | invalid_token = not token_match | |
230 |
|
230 | |||
231 | log.debug('Checking if API KEY is valid with proper role') |
|
231 | log.debug('Checking if API KEY is valid with proper role') | |
232 | if invalid_token: |
|
232 | if invalid_token: | |
233 | return jsonrpc_error( |
|
233 | return jsonrpc_error( | |
234 | request, retid=request.rpc_id, |
|
234 | request, retid=request.rpc_id, | |
235 | message='API KEY invalid or, has bad role for an API call') |
|
235 | message='API KEY invalid or, has bad role for an API call') | |
236 |
|
236 | |||
237 | except Exception: |
|
237 | except Exception: | |
238 | log.exception('Error on API AUTH') |
|
238 | log.exception('Error on API AUTH') | |
239 | return jsonrpc_error( |
|
239 | return jsonrpc_error( | |
240 | request, retid=request.rpc_id, message='Invalid API KEY') |
|
240 | request, retid=request.rpc_id, message='Invalid API KEY') | |
241 |
|
241 | |||
242 | method = request.rpc_method |
|
242 | method = request.rpc_method | |
243 | func = request.registry.jsonrpc_methods[method] |
|
243 | func = request.registry.jsonrpc_methods[method] | |
244 |
|
244 | |||
245 | # now that we have a method, add request._req_params to |
|
245 | # now that we have a method, add request._req_params to | |
246 | # self.kwargs and dispatch control to WSGIController |
|
246 | # self.kwargs and dispatch control to WSGIController | |
247 | argspec = inspect.getargspec(func) |
|
247 | argspec = inspect.getargspec(func) | |
248 | arglist = argspec[0] |
|
248 | arglist = argspec[0] | |
249 | defaults = map(type, argspec[3] or []) |
|
249 | defaults = map(type, argspec[3] or []) | |
250 | default_empty = types.NotImplementedType |
|
250 | default_empty = types.NotImplementedType | |
251 |
|
251 | |||
252 | # kw arguments required by this method |
|
252 | # kw arguments required by this method | |
253 | func_kwargs = dict(itertools.izip_longest( |
|
253 | func_kwargs = dict(itertools.izip_longest( | |
254 | reversed(arglist), reversed(defaults), fillvalue=default_empty)) |
|
254 | reversed(arglist), reversed(defaults), fillvalue=default_empty)) | |
255 |
|
255 | |||
256 | # This attribute will need to be the first param of a method that uses |
|
256 | # This attribute will need to be the first param of a method that uses | |
257 | # api_key, which is translated to an instance of the user under that name |
|
257 | # api_key, which is translated to an instance of the user under that name | |
258 | user_var = 'apiuser' |
|
258 | user_var = 'apiuser' | |
259 | request_var = 'request' |
|
259 | request_var = 'request' | |
260 |
|
260 | |||
261 | for arg in [user_var, request_var]: |
|
261 | for arg in [user_var, request_var]: | |
262 | if arg not in arglist: |
|
262 | if arg not in arglist: | |
263 | return jsonrpc_error( |
|
263 | return jsonrpc_error( | |
264 | request, |
|
264 | request, | |
265 | retid=request.rpc_id, |
|
265 | retid=request.rpc_id, | |
266 | message='This method [%s] does not support ' |
|
266 | message='This method [%s] does not support ' | |
267 | 'required parameter `%s`' % (func.__name__, arg)) |
|
267 | 'required parameter `%s`' % (func.__name__, arg)) | |
268 |
|
268 | |||
269 | # get our arglist and check if we provided them as args |
|
269 | # get our arglist and check if we provided them as args | |
270 | for arg, default in func_kwargs.items(): |
|
270 | for arg, default in func_kwargs.items(): | |
271 | if arg in [user_var, request_var]: |
|
271 | if arg in [user_var, request_var]: | |
272 | # user_var and request_var are pre-hardcoded parameters and we |
|
272 | # user_var and request_var are pre-hardcoded parameters and we | |
273 | # don't need to do any translation |
|
273 | # don't need to do any translation | |
274 | continue |
|
274 | continue | |
275 |
|
275 | |||
276 | # skip the required param check if its default value is |
|
276 | # skip the required param check if its default value is | |
277 | # NotImplementedType (default_empty) |
|
277 | # NotImplementedType (default_empty) | |
278 | if default == default_empty and arg not in request.rpc_params: |
|
278 | if default == default_empty and arg not in request.rpc_params: | |
279 | return jsonrpc_error( |
|
279 | return jsonrpc_error( | |
280 | request, |
|
280 | request, | |
281 | retid=request.rpc_id, |
|
281 | retid=request.rpc_id, | |
282 | message=('Missing non optional `%s` arg in JSON DATA' % arg) |
|
282 | message=('Missing non optional `%s` arg in JSON DATA' % arg) | |
283 | ) |
|
283 | ) | |
284 |
|
284 | |||
285 | # sanitize extra passed arguments |
|
285 | # sanitize extra passed arguments | |
286 | for k in request.rpc_params.keys()[:]: |
|
286 | for k in request.rpc_params.keys()[:]: | |
287 | if k not in func_kwargs: |
|
287 | if k not in func_kwargs: | |
288 | del request.rpc_params[k] |
|
288 | del request.rpc_params[k] | |
289 |
|
289 | |||
290 | call_params = request.rpc_params |
|
290 | call_params = request.rpc_params | |
291 | call_params.update({ |
|
291 | call_params.update({ | |
292 | 'request': request, |
|
292 | 'request': request, | |
293 | 'apiuser': auth_u |
|
293 | 'apiuser': auth_u | |
294 | }) |
|
294 | }) | |
295 |
|
295 | |||
296 | # register some common functions for usage |
|
296 | # register some common functions for usage | |
297 | attach_context_attributes(TemplateArgs(), request, request.rpc_user.user_id) |
|
297 | attach_context_attributes(TemplateArgs(), request, request.rpc_user.user_id) | |
298 |
|
298 | |||
|
299 | statsd = request.registry.statsd | |||
|
300 | ||||
299 | try: |
|
301 | try: | |
300 | ret_value = func(**call_params) |
|
302 | ret_value = func(**call_params) | |
301 | return jsonrpc_response(request, ret_value) |
|
303 | resp = jsonrpc_response(request, ret_value) | |
|
304 | if statsd: | |||
|
305 | statsd.incr('rhodecode_api_call_success_total') | |||
|
306 | return resp | |||
302 | except JSONRPCBaseError: |
|
307 | except JSONRPCBaseError: | |
303 | raise |
|
308 | raise | |
304 | except Exception: |
|
309 | except Exception: | |
305 | log.exception('Unhandled exception occurred on api call: %s', func) |
|
310 | log.exception('Unhandled exception occurred on api call: %s', func) | |
306 | exc_info = sys.exc_info() |
|
311 | exc_info = sys.exc_info() | |
307 | exc_id, exc_type_name = store_exception( |
|
312 | exc_id, exc_type_name = store_exception( | |
308 | id(exc_info), exc_info, prefix='rhodecode-api') |
|
313 | id(exc_info), exc_info, prefix='rhodecode-api') | |
309 | error_headers = [('RhodeCode-Exception-Id', str(exc_id)), |
|
314 | error_headers = [('RhodeCode-Exception-Id', str(exc_id)), | |
310 | ('RhodeCode-Exception-Type', str(exc_type_name))] |
|
315 | ('RhodeCode-Exception-Type', str(exc_type_name))] | |
311 | return jsonrpc_error( |
|
316 | err_resp = jsonrpc_error( | |
312 | request, retid=request.rpc_id, message='Internal server error', |
|
317 | request, retid=request.rpc_id, message='Internal server error', | |
313 | headers=error_headers) |
|
318 | headers=error_headers) | |
|
319 | if statsd: | |||
|
320 | statsd.incr('rhodecode_api_call_fail_total') | |||
|
321 | return err_resp | |||
314 |
|
322 | |||
315 |
|
323 | |||
316 | def setup_request(request): |
|
324 | def setup_request(request): | |
317 | """ |
|
325 | """ | |
318 | Parse a JSON-RPC request body. It's used inside the predicates method |
|
326 | Parse a JSON-RPC request body. It's used inside the predicates method | |
319 | to validate and bootstrap requests for usage in rpc calls. |
|
327 | to validate and bootstrap requests for usage in rpc calls. | |
320 |
|
328 | |||
321 | We need to raise JSONRPCError here if we want to return some errors back to |
|
329 | We need to raise JSONRPCError here if we want to return some errors back to | |
322 | user. |
|
330 | user. | |
323 | """ |
|
331 | """ | |
324 |
|
332 | |||
325 | log.debug('Executing setup request: %r', request) |
|
333 | log.debug('Executing setup request: %r', request) | |
326 | request.rpc_ip_addr = get_ip_addr(request.environ) |
|
334 | request.rpc_ip_addr = get_ip_addr(request.environ) | |
327 | # TODO(marcink): deprecate GET at some point |
|
335 | # TODO(marcink): deprecate GET at some point | |
328 | if request.method not in ['POST', 'GET']: |
|
336 | if request.method not in ['POST', 'GET']: | |
329 | log.debug('unsupported request method "%s"', request.method) |
|
337 | log.debug('unsupported request method "%s"', request.method) | |
330 | raise JSONRPCError( |
|
338 | raise JSONRPCError( | |
331 | 'unsupported request method "%s". Please use POST' % request.method) |
|
339 | 'unsupported request method "%s". Please use POST' % request.method) | |
332 |
|
340 | |||
333 | if 'CONTENT_LENGTH' not in request.environ: |
|
341 | if 'CONTENT_LENGTH' not in request.environ: | |
334 | log.debug("No Content-Length") |
|
342 | log.debug("No Content-Length") | |
335 | raise JSONRPCError("Empty body, No Content-Length in request") |
|
343 | raise JSONRPCError("Empty body, No Content-Length in request") | |
336 |
|
344 | |||
337 | else: |
|
345 | else: | |
338 | length = request.environ['CONTENT_LENGTH'] |
|
346 | length = request.environ['CONTENT_LENGTH'] | |
339 | log.debug('Content-Length: %s', length) |
|
347 | log.debug('Content-Length: %s', length) | |
340 |
|
348 | |||
341 | if length == 0: |
|
349 | if length == 0: | |
342 | log.debug("Content-Length is 0") |
|
350 | log.debug("Content-Length is 0") | |
343 | raise JSONRPCError("Content-Length is 0") |
|
351 | raise JSONRPCError("Content-Length is 0") | |
344 |
|
352 | |||
345 | raw_body = request.body |
|
353 | raw_body = request.body | |
346 | log.debug("Loading JSON body now") |
|
354 | log.debug("Loading JSON body now") | |
347 | try: |
|
355 | try: | |
348 | json_body = json.loads(raw_body) |
|
356 | json_body = json.loads(raw_body) | |
349 | except ValueError as e: |
|
357 | except ValueError as e: | |
350 | # catch JSON errors here |
|
358 | # catch JSON errors here | |
351 | raise JSONRPCError("JSON parse error ERR:%s RAW:%r" % (e, raw_body)) |
|
359 | raise JSONRPCError("JSON parse error ERR:%s RAW:%r" % (e, raw_body)) | |
352 |
|
360 | |||
353 | request.rpc_id = json_body.get('id') |
|
361 | request.rpc_id = json_body.get('id') | |
354 | request.rpc_method = json_body.get('method') |
|
362 | request.rpc_method = json_body.get('method') | |
355 |
|
363 | |||
356 | # check required base parameters |
|
364 | # check required base parameters | |
357 | try: |
|
365 | try: | |
358 | api_key = json_body.get('api_key') |
|
366 | api_key = json_body.get('api_key') | |
359 | if not api_key: |
|
367 | if not api_key: | |
360 | api_key = json_body.get('auth_token') |
|
368 | api_key = json_body.get('auth_token') | |
361 |
|
369 | |||
362 | if not api_key: |
|
370 | if not api_key: | |
363 | raise KeyError('api_key or auth_token') |
|
371 | raise KeyError('api_key or auth_token') | |
364 |
|
372 | |||
365 | # TODO(marcink): support passing in token in request header |
|
373 | # TODO(marcink): support passing in token in request header | |
366 |
|
374 | |||
367 | request.rpc_api_key = api_key |
|
375 | request.rpc_api_key = api_key | |
368 | request.rpc_id = json_body['id'] |
|
376 | request.rpc_id = json_body['id'] | |
369 | request.rpc_method = json_body['method'] |
|
377 | request.rpc_method = json_body['method'] | |
370 | request.rpc_params = json_body['args'] \ |
|
378 | request.rpc_params = json_body['args'] \ | |
371 | if isinstance(json_body['args'], dict) else {} |
|
379 | if isinstance(json_body['args'], dict) else {} | |
372 |
|
380 | |||
373 | log.debug('method: %s, params: %.10240r', request.rpc_method, request.rpc_params) |
|
381 | log.debug('method: %s, params: %.10240r', request.rpc_method, request.rpc_params) | |
374 | except KeyError as e: |
|
382 | except KeyError as e: | |
375 | raise JSONRPCError('Incorrect JSON data. Missing %s' % e) |
|
383 | raise JSONRPCError('Incorrect JSON data. Missing %s' % e) | |
376 |
|
384 | |||
377 | log.debug('setup complete, now handling method:%s rpcid:%s', |
|
385 | log.debug('setup complete, now handling method:%s rpcid:%s', | |
378 | request.rpc_method, request.rpc_id, ) |
|
386 | request.rpc_method, request.rpc_id, ) | |
379 |
|
387 | |||
380 |
|
388 | |||
381 | class RoutePredicate(object): |
|
389 | class RoutePredicate(object): | |
382 | def __init__(self, val, config): |
|
390 | def __init__(self, val, config): | |
383 | self.val = val |
|
391 | self.val = val | |
384 |
|
392 | |||
385 | def text(self): |
|
393 | def text(self): | |
386 | return 'jsonrpc route = %s' % self.val |
|
394 | return 'jsonrpc route = %s' % self.val | |
387 |
|
395 | |||
388 | phash = text |
|
396 | phash = text | |
389 |
|
397 | |||
390 | def __call__(self, info, request): |
|
398 | def __call__(self, info, request): | |
391 | if self.val: |
|
399 | if self.val: | |
392 | # potentially setup and bootstrap our call |
|
400 | # potentially setup and bootstrap our call | |
393 | setup_request(request) |
|
401 | setup_request(request) | |
394 |
|
402 | |||
395 | # Always return True so that even if it isn't a valid RPC it |
|
403 | # Always return True so that even if it isn't a valid RPC it | |
396 | # will fall through to the underlying handlers like notfound_view |
|
404 | # will fall through to the underlying handlers like notfound_view | |
397 | return True |
|
405 | return True | |
398 |
|
406 | |||
399 |
|
407 | |||
400 | class NotFoundPredicate(object): |
|
408 | class NotFoundPredicate(object): | |
401 | def __init__(self, val, config): |
|
409 | def __init__(self, val, config): | |
402 | self.val = val |
|
410 | self.val = val | |
403 | self.methods = config.registry.jsonrpc_methods |
|
411 | self.methods = config.registry.jsonrpc_methods | |
404 |
|
412 | |||
405 | def text(self): |
|
413 | def text(self): | |
406 | return 'jsonrpc method not found = {}.'.format(self.val) |
|
414 | return 'jsonrpc method not found = {}.'.format(self.val) | |
407 |
|
415 | |||
408 | phash = text |
|
416 | phash = text | |
409 |
|
417 | |||
410 | def __call__(self, info, request): |
|
418 | def __call__(self, info, request): | |
411 | return hasattr(request, 'rpc_method') |
|
419 | return hasattr(request, 'rpc_method') | |
412 |
|
420 | |||
413 |
|
421 | |||
414 | class MethodPredicate(object): |
|
422 | class MethodPredicate(object): | |
415 | def __init__(self, val, config): |
|
423 | def __init__(self, val, config): | |
416 | self.method = val |
|
424 | self.method = val | |
417 |
|
425 | |||
418 | def text(self): |
|
426 | def text(self): | |
419 | return 'jsonrpc method = %s' % self.method |
|
427 | return 'jsonrpc method = %s' % self.method | |
420 |
|
428 | |||
421 | phash = text |
|
429 | phash = text | |
422 |
|
430 | |||
423 | def __call__(self, context, request): |
|
431 | def __call__(self, context, request): | |
424 | # we need to explicitly return False here, so pyramid doesn't try to |
|
432 | # we need to explicitly return False here, so pyramid doesn't try to | |
425 | # execute our view directly. We need our main handler to execute things |
|
433 | # execute our view directly. We need our main handler to execute things | |
426 | return getattr(request, 'rpc_method') == self.method |
|
434 | return getattr(request, 'rpc_method') == self.method | |
427 |
|
435 | |||
428 |
|
436 | |||
429 | def add_jsonrpc_method(config, view, **kwargs): |
|
437 | def add_jsonrpc_method(config, view, **kwargs): | |
430 | # pop the method name |
|
438 | # pop the method name | |
431 | method = kwargs.pop('method', None) |
|
439 | method = kwargs.pop('method', None) | |
432 |
|
440 | |||
433 | if method is None: |
|
441 | if method is None: | |
434 | raise ConfigurationError( |
|
442 | raise ConfigurationError( | |
435 | 'Cannot register a JSON-RPC method without specifying the "method"') |
|
443 | 'Cannot register a JSON-RPC method without specifying the "method"') | |
436 |
|
444 | |||
437 | # we define a custom predicate to enable detection of conflicting methods; |
|
445 | # we define a custom predicate to enable detection of conflicting methods; | |
438 | # these predicates are a kind of "translation" from the decorator variables |
|
446 | # these predicates are a kind of "translation" from the decorator variables | |
439 | # to internal predicate names |
|
447 | # to internal predicate names | |
440 |
|
448 | |||
441 | kwargs['jsonrpc_method'] = method |
|
449 | kwargs['jsonrpc_method'] = method | |
442 |
|
450 | |||
443 | # register our view into global view store for validation |
|
451 | # register our view into global view store for validation | |
444 | config.registry.jsonrpc_methods[method] = view |
|
452 | config.registry.jsonrpc_methods[method] = view | |
445 |
|
453 | |||
446 | # we're using our main request_view handler, here, so each method |
|
454 | # we're using our main request_view handler, here, so each method | |
447 | # has a unified handler for itself |
|
455 | # has a unified handler for itself | |
448 | config.add_view(request_view, route_name='apiv2', **kwargs) |
|
456 | config.add_view(request_view, route_name='apiv2', **kwargs) | |
449 |
|
457 | |||
450 |
|
458 | |||
451 | class jsonrpc_method(object): |
|
459 | class jsonrpc_method(object): | |
452 | """ |
|
460 | """ | |
453 | decorator that works similar to @add_view_config decorator, |
|
461 | decorator that works similar to @add_view_config decorator, | |
454 | but tailored for our JSON RPC |
|
462 | but tailored for our JSON RPC | |
455 | """ |
|
463 | """ | |
456 |
|
464 | |||
457 | venusian = venusian # for testing injection |
|
465 | venusian = venusian # for testing injection | |
458 |
|
466 | |||
459 | def __init__(self, method=None, **kwargs): |
|
467 | def __init__(self, method=None, **kwargs): | |
460 | self.method = method |
|
468 | self.method = method | |
461 | self.kwargs = kwargs |
|
469 | self.kwargs = kwargs | |
462 |
|
470 | |||
463 | def __call__(self, wrapped): |
|
471 | def __call__(self, wrapped): | |
464 | kwargs = self.kwargs.copy() |
|
472 | kwargs = self.kwargs.copy() | |
465 | kwargs['method'] = self.method or wrapped.__name__ |
|
473 | kwargs['method'] = self.method or wrapped.__name__ | |
466 | depth = kwargs.pop('_depth', 0) |
|
474 | depth = kwargs.pop('_depth', 0) | |
467 |
|
475 | |||
468 | def callback(context, name, ob): |
|
476 | def callback(context, name, ob): | |
469 | config = context.config.with_package(info.module) |
|
477 | config = context.config.with_package(info.module) | |
470 | config.add_jsonrpc_method(view=ob, **kwargs) |
|
478 | config.add_jsonrpc_method(view=ob, **kwargs) | |
471 |
|
479 | |||
472 | info = venusian.attach(wrapped, callback, category='pyramid', |
|
480 | info = venusian.attach(wrapped, callback, category='pyramid', | |
473 | depth=depth + 1) |
|
481 | depth=depth + 1) | |
474 | if info.scope == 'class': |
|
482 | if info.scope == 'class': | |
475 | # ensure that attr is set if decorating a class method |
|
483 | # ensure that attr is set if decorating a class method | |
476 | kwargs.setdefault('attr', wrapped.__name__) |
|
484 | kwargs.setdefault('attr', wrapped.__name__) | |
477 |
|
485 | |||
478 | kwargs['_info'] = info.codeinfo # fbo action_method |
|
486 | kwargs['_info'] = info.codeinfo # fbo action_method | |
479 | return wrapped |
|
487 | return wrapped | |
480 |
|
488 | |||
481 |
|
489 | |||
482 | class jsonrpc_deprecated_method(object): |
|
490 | class jsonrpc_deprecated_method(object): | |
483 | """ |
|
491 | """ | |
484 | Marks a method as deprecated, adds a log.warning, and injects a special key into |
|
492 | Marks a method as deprecated, adds a log.warning, and injects a special key into | |
485 | the request variable to mark the method as deprecated. |
|
493 | the request variable to mark the method as deprecated. | |
486 | Also injects a special docstring that extract_docs will catch to mark |
|
494 | Also injects a special docstring that extract_docs will catch to mark | |
487 | the method as deprecated. |
|
495 | the method as deprecated. | |
488 |
|
496 | |||
489 | :param use_method: specify which method should be used instead of |
|
497 | :param use_method: specify which method should be used instead of | |
490 | the decorated one |
|
498 | the decorated one | |
491 |
|
499 | |||
492 | Use like:: |
|
500 | Use like:: | |
493 |
|
501 | |||
494 | @jsonrpc_method() |
|
502 | @jsonrpc_method() | |
495 | @jsonrpc_deprecated_method(use_method='new_func', deprecated_at_version='3.0.0') |
|
503 | @jsonrpc_deprecated_method(use_method='new_func', deprecated_at_version='3.0.0') | |
496 | def old_func(request, apiuser, arg1, arg2): |
|
504 | def old_func(request, apiuser, arg1, arg2): | |
497 | ... |
|
505 | ... | |
498 | """ |
|
506 | """ | |
499 |
|
507 | |||
500 | def __init__(self, use_method, deprecated_at_version): |
|
508 | def __init__(self, use_method, deprecated_at_version): | |
501 | self.use_method = use_method |
|
509 | self.use_method = use_method | |
502 | self.deprecated_at_version = deprecated_at_version |
|
510 | self.deprecated_at_version = deprecated_at_version | |
503 | self.deprecated_msg = '' |
|
511 | self.deprecated_msg = '' | |
504 |
|
512 | |||
505 | def __call__(self, func): |
|
513 | def __call__(self, func): | |
506 | self.deprecated_msg = 'Please use method `{method}` instead.'.format( |
|
514 | self.deprecated_msg = 'Please use method `{method}` instead.'.format( | |
507 | method=self.use_method) |
|
515 | method=self.use_method) | |
508 |
|
516 | |||
509 | docstring = """\n |
|
517 | docstring = """\n | |
510 | .. deprecated:: {version} |
|
518 | .. deprecated:: {version} | |
511 |
|
519 | |||
512 | {deprecation_message} |
|
520 | {deprecation_message} | |
513 |
|
521 | |||
514 | {original_docstring} |
|
522 | {original_docstring} | |
515 | """ |
|
523 | """ | |
516 | func.__doc__ = docstring.format( |
|
524 | func.__doc__ = docstring.format( | |
517 | version=self.deprecated_at_version, |
|
525 | version=self.deprecated_at_version, | |
518 | deprecation_message=self.deprecated_msg, |
|
526 | deprecation_message=self.deprecated_msg, | |
519 | original_docstring=func.__doc__) |
|
527 | original_docstring=func.__doc__) | |
520 | return decorator.decorator(self.__wrapper, func) |
|
528 | return decorator.decorator(self.__wrapper, func) | |
521 |
|
529 | |||
522 | def __wrapper(self, func, *fargs, **fkwargs): |
|
530 | def __wrapper(self, func, *fargs, **fkwargs): | |
523 | log.warning('DEPRECATED API CALL on function %s, please ' |
|
531 | log.warning('DEPRECATED API CALL on function %s, please ' | |
524 | 'use `%s` instead', func, self.use_method) |
|
532 | 'use `%s` instead', func, self.use_method) | |
525 | # alter function docstring to mark as deprecated, this is picked up |
|
533 | # alter function docstring to mark as deprecated, this is picked up | |
526 | # via fabric file that generates API DOC. |
|
534 | # via fabric file that generates API DOC. | |
527 | result = func(*fargs, **fkwargs) |
|
535 | result = func(*fargs, **fkwargs) | |
528 |
|
536 | |||
529 | request = fargs[0] |
|
537 | request = fargs[0] | |
530 | request.rpc_deprecation = 'DEPRECATED METHOD ' + self.deprecated_msg |
|
538 | request.rpc_deprecation = 'DEPRECATED METHOD ' + self.deprecated_msg | |
531 | return result |
|
539 | return result | |
532 |
|
540 | |||
533 |
|
541 | |||
534 | def add_api_methods(config): |
|
542 | def add_api_methods(config): | |
535 | from rhodecode.api.views import ( |
|
543 | from rhodecode.api.views import ( | |
536 | deprecated_api, gist_api, pull_request_api, repo_api, repo_group_api, |
|
544 | deprecated_api, gist_api, pull_request_api, repo_api, repo_group_api, | |
537 | server_api, search_api, testing_api, user_api, user_group_api) |
|
545 | server_api, search_api, testing_api, user_api, user_group_api) | |
538 |
|
546 | |||
539 | config.scan('rhodecode.api.views') |
|
547 | config.scan('rhodecode.api.views') | |
540 |
|
548 | |||
541 |
|
549 | |||
542 | def includeme(config): |
|
550 | def includeme(config): | |
543 | plugin_module = 'rhodecode.api' |
|
551 | plugin_module = 'rhodecode.api' | |
544 | plugin_settings = get_plugin_settings( |
|
552 | plugin_settings = get_plugin_settings( | |
545 | plugin_module, config.registry.settings) |
|
553 | plugin_module, config.registry.settings) | |
546 |
|
554 | |||
547 | if not hasattr(config.registry, 'jsonrpc_methods'): |
|
555 | if not hasattr(config.registry, 'jsonrpc_methods'): | |
548 | config.registry.jsonrpc_methods = OrderedDict() |
|
556 | config.registry.jsonrpc_methods = OrderedDict() | |
549 |
|
557 | |||
550 | # match filter by given method only |
|
558 | # match filter by given method only | |
551 | config.add_view_predicate('jsonrpc_method', MethodPredicate) |
|
559 | config.add_view_predicate('jsonrpc_method', MethodPredicate) | |
552 | config.add_view_predicate('jsonrpc_method_not_found', NotFoundPredicate) |
|
560 | config.add_view_predicate('jsonrpc_method_not_found', NotFoundPredicate) | |
553 |
|
561 | |||
554 | config.add_renderer(DEFAULT_RENDERER, ExtJsonRenderer( |
|
562 | config.add_renderer(DEFAULT_RENDERER, ExtJsonRenderer( | |
555 | serializer=json.dumps, indent=4)) |
|
563 | serializer=json.dumps, indent=4)) | |
556 | config.add_directive('add_jsonrpc_method', add_jsonrpc_method) |
|
564 | config.add_directive('add_jsonrpc_method', add_jsonrpc_method) | |
557 |
|
565 | |||
558 | config.add_route_predicate( |
|
566 | config.add_route_predicate( | |
559 | 'jsonrpc_call', RoutePredicate) |
|
567 | 'jsonrpc_call', RoutePredicate) | |
560 |
|
568 | |||
561 | config.add_route( |
|
569 | config.add_route( | |
562 | 'apiv2', plugin_settings.get('url', DEFAULT_URL), jsonrpc_call=True) |
|
570 | 'apiv2', plugin_settings.get('url', DEFAULT_URL), jsonrpc_call=True) | |
563 |
|
571 | |||
564 | # register some exception handling view |
|
572 | # register some exception handling view | |
565 | config.add_view(exception_view, context=JSONRPCBaseError) |
|
573 | config.add_view(exception_view, context=JSONRPCBaseError) | |
566 | config.add_notfound_view(exception_view, jsonrpc_method_not_found=True) |
|
574 | config.add_notfound_view(exception_view, jsonrpc_method_not_found=True) | |
567 |
|
575 | |||
568 | add_api_methods(config) |
|
576 | add_api_methods(config) |
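
For reference, the module above registers each exposed function under the 'apiv2' route and answers every call with the envelope built in jsonrpc_response(), i.e. {'id': ..., 'result': ..., 'error': ...}. A minimal client round-trip against the default endpoint (DEFAULT_URL = '/_admin/apiv2') could look roughly like the sketch below; the host name, auth token and method name are placeholders, not values taken from this changeset::

    # Python 2 sketch, matching the codebase above; host/token/method are hypothetical
    import json
    import urllib2

    payload = {
        'id': 1,                        # echoed back as rpc_id in the response
        'auth_token': '<TOKEN>',        # or the legacy 'api_key' field
        'method': 'get_repo',           # any method registered via @jsonrpc_method
        'args': {'repoid': 'my-repo'},  # keyword arguments of the exposed function
    }
    req = urllib2.Request(
        'https://code.example.com/_admin/apiv2',
        data=json.dumps(payload),
        headers={'content-type': 'application/json'})
    reply = json.loads(urllib2.urlopen(req).read())
    # success: {'id': 1, 'result': {...}, 'error': None}
    # failure: {'id': 1, 'result': None, 'error': '<message>'} (see jsonrpc_error)
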
@@ -1,809 +1,816 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Authentication modules |
|
22 | Authentication modules | |
23 | """ |
|
23 | """ | |
24 | import socket |
|
24 | import socket | |
25 | import string |
|
25 | import string | |
26 | import colander |
|
26 | import colander | |
27 | import copy |
|
27 | import copy | |
28 | import logging |
|
28 | import logging | |
29 | import time |
|
29 | import time | |
30 | import traceback |
|
30 | import traceback | |
31 | import warnings |
|
31 | import warnings | |
32 | import functools |
|
32 | import functools | |
33 |
|
33 | |||
34 | from pyramid.threadlocal import get_current_registry |
|
34 | from pyramid.threadlocal import get_current_registry | |
35 |
|
35 | |||
36 | from rhodecode.authentication.interface import IAuthnPluginRegistry |
|
36 | from rhodecode.authentication.interface import IAuthnPluginRegistry | |
37 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
37 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase | |
38 | from rhodecode.lib import rc_cache |
|
38 | from rhodecode.lib import rc_cache | |
|
39 | from rhodecode.lib.statsd_client import StatsdClient | |||
39 | from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt |
|
40 | from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt | |
40 | from rhodecode.lib.utils2 import safe_int, safe_str |
|
41 | from rhodecode.lib.utils2 import safe_int, safe_str | |
41 | from rhodecode.lib.exceptions import LdapConnectionError, LdapUsernameError, |
|
42 | from rhodecode.lib.exceptions import (LdapConnectionError, LdapUsernameError, LdapPasswordError) | |
42 | LdapPasswordError |
|
|||
43 | from rhodecode.model.db import User |
|
43 | from rhodecode.model.db import User | |
44 | from rhodecode.model.meta import Session |
|
44 | from rhodecode.model.meta import Session | |
45 | from rhodecode.model.settings import SettingsModel |
|
45 | from rhodecode.model.settings import SettingsModel | |
46 | from rhodecode.model.user import UserModel |
|
46 | from rhodecode.model.user import UserModel | |
47 | from rhodecode.model.user_group import UserGroupModel |
|
47 | from rhodecode.model.user_group import UserGroupModel | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | log = logging.getLogger(__name__) |
|
50 | log = logging.getLogger(__name__) | |
51 |
|
51 | |||
52 | # auth types that authenticate() function can receive |
|
52 | # auth types that authenticate() function can receive | |
53 | VCS_TYPE = 'vcs' |
|
53 | VCS_TYPE = 'vcs' | |
54 | HTTP_TYPE = 'http' |
|
54 | HTTP_TYPE = 'http' | |
55 |
|
55 | |||
56 | external_auth_session_key = 'rhodecode.external_auth' |
|
56 | external_auth_session_key = 'rhodecode.external_auth' | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | class hybrid_property(object): |
|
59 | class hybrid_property(object): | |
60 | """ |
|
60 | """ | |
61 | a property decorator that works both for instance and class |
|
61 | a property decorator that works both for instance and class | |
62 | """ |
|
62 | """ | |
63 | def __init__(self, fget, fset=None, fdel=None, expr=None): |
|
63 | def __init__(self, fget, fset=None, fdel=None, expr=None): | |
64 | self.fget = fget |
|
64 | self.fget = fget | |
65 | self.fset = fset |
|
65 | self.fset = fset | |
66 | self.fdel = fdel |
|
66 | self.fdel = fdel | |
67 | self.expr = expr or fget |
|
67 | self.expr = expr or fget | |
68 | functools.update_wrapper(self, fget) |
|
68 | functools.update_wrapper(self, fget) | |
69 |
|
69 | |||
70 | def __get__(self, instance, owner): |
|
70 | def __get__(self, instance, owner): | |
71 | if instance is None: |
|
71 | if instance is None: | |
72 | return self.expr(owner) |
|
72 | return self.expr(owner) | |
73 | else: |
|
73 | else: | |
74 | return self.fget(instance) |
|
74 | return self.fget(instance) | |
75 |
|
75 | |||
76 | def __set__(self, instance, value): |
|
76 | def __set__(self, instance, value): | |
77 | self.fset(instance, value) |
|
77 | self.fset(instance, value) | |
78 |
|
78 | |||
79 | def __delete__(self, instance): |
|
79 | def __delete__(self, instance): | |
80 | self.fdel(instance) |
|
80 | self.fdel(instance) | |
81 |
|
81 | |||
82 |
|
82 | |||
83 | class LazyFormencode(object): |
|
83 | class LazyFormencode(object): | |
84 | def __init__(self, formencode_obj, *args, **kwargs): |
|
84 | def __init__(self, formencode_obj, *args, **kwargs): | |
85 | self.formencode_obj = formencode_obj |
|
85 | self.formencode_obj = formencode_obj | |
86 | self.args = args |
|
86 | self.args = args | |
87 | self.kwargs = kwargs |
|
87 | self.kwargs = kwargs | |
88 |
|
88 | |||
89 | def __call__(self, *args, **kwargs): |
|
89 | def __call__(self, *args, **kwargs): | |
90 | from inspect import isfunction |
|
90 | from inspect import isfunction | |
91 | formencode_obj = self.formencode_obj |
|
91 | formencode_obj = self.formencode_obj | |
92 | if isfunction(formencode_obj): |
|
92 | if isfunction(formencode_obj): | |
93 | # case we wrap validators into functions |
|
93 | # case we wrap validators into functions | |
94 | formencode_obj = self.formencode_obj(*args, **kwargs) |
|
94 | formencode_obj = self.formencode_obj(*args, **kwargs) | |
95 | return formencode_obj(*self.args, **self.kwargs) |
|
95 | return formencode_obj(*self.args, **self.kwargs) | |
96 |
|
96 | |||
97 |
|
97 | |||
98 | class RhodeCodeAuthPluginBase(object): |
|
98 | class RhodeCodeAuthPluginBase(object): | |
99 | # UID is used to register plugin to the registry |
|
99 | # UID is used to register plugin to the registry | |
100 | uid = None |
|
100 | uid = None | |
101 |
|
101 | |||
102 | # cache the authentication request for N seconds. Some kinds |
|
102 | # cache the authentication request for N seconds. Some kinds | |
103 | # of authentication methods are very heavy and it's very efficient to cache |
|
103 | # of authentication methods are very heavy and it's very efficient to cache | |
104 | # the result of a call. If it's set to None (default), the cache is off |
|
104 | # the result of a call. If it's set to None (default), the cache is off | |
105 | AUTH_CACHE_TTL = None |
|
105 | AUTH_CACHE_TTL = None | |
106 | AUTH_CACHE = {} |
|
106 | AUTH_CACHE = {} | |
107 |
|
107 | |||
108 | auth_func_attrs = { |
|
108 | auth_func_attrs = { | |
109 | "username": "unique username", |
|
109 | "username": "unique username", | |
110 | "firstname": "first name", |
|
110 | "firstname": "first name", | |
111 | "lastname": "last name", |
|
111 | "lastname": "last name", | |
112 | "email": "email address", |
|
112 | "email": "email address", | |
113 | "groups": '["list", "of", "groups"]', |
|
113 | "groups": '["list", "of", "groups"]', | |
114 | "user_group_sync": |
|
114 | "user_group_sync": | |
115 | 'True|False defines if returned user groups should be synced', |
|
115 | 'True|False defines if returned user groups should be synced', | |
116 | "extern_name": "name in external source of record", |
|
116 | "extern_name": "name in external source of record", | |
117 | "extern_type": "type of external source of record", |
|
117 | "extern_type": "type of external source of record", | |
118 | "admin": 'True|False defines if user should be RhodeCode super admin', |
|
118 | "admin": 'True|False defines if user should be RhodeCode super admin', | |
119 | "active": |
|
119 | "active": | |
120 | 'True|False defines active state of user internally for RhodeCode', |
|
120 | 'True|False defines active state of user internally for RhodeCode', | |
121 | "active_from_extern": |
|
121 | "active_from_extern": | |
122 | "True|False|None, active state from the external auth, " |
|
122 | "True|False|None, active state from the external auth, " | |
123 | "None means use definition from RhodeCode extern_type active value" |
|
123 | "None means use definition from RhodeCode extern_type active value" | |
124 |
|
124 | |||
125 | } |
|
125 | } | |
126 | # set on authenticate() method and via set_auth_type func. |
|
126 | # set on authenticate() method and via set_auth_type func. | |
127 | auth_type = None |
|
127 | auth_type = None | |
128 |
|
128 | |||
129 | # set on authenticate() method and via set_calling_scope_repo, this is a |
|
129 | # set on authenticate() method and via set_calling_scope_repo, this is a | |
130 | # calling scope repository when doing authentication most likely on VCS |
|
130 | # calling scope repository when doing authentication most likely on VCS | |
131 | # operations |
|
131 | # operations | |
132 | acl_repo_name = None |
|
132 | acl_repo_name = None | |
133 |
|
133 | |||
134 | # List of setting names to store encrypted. Plugins may override this list |
|
134 | # List of setting names to store encrypted. Plugins may override this list | |
135 | # to store settings encrypted. |
|
135 | # to store settings encrypted. | |
136 | _settings_encrypted = [] |
|
136 | _settings_encrypted = [] | |
137 |
|
137 | |||
138 | # Mapping of python to DB settings model types. Plugins may override or |
|
138 | # Mapping of python to DB settings model types. Plugins may override or | |
139 | # extend this mapping. |
|
139 | # extend this mapping. | |
140 | _settings_type_map = { |
|
140 | _settings_type_map = { | |
141 | colander.String: 'unicode', |
|
141 | colander.String: 'unicode', | |
142 | colander.Integer: 'int', |
|
142 | colander.Integer: 'int', | |
143 | colander.Boolean: 'bool', |
|
143 | colander.Boolean: 'bool', | |
144 | colander.List: 'list', |
|
144 | colander.List: 'list', | |
145 | } |
|
145 | } | |
146 |
|
146 | |||
147 | # list of keys in settings that are unsafe to be logged, should be passwords |
|
147 | # list of keys in settings that are unsafe to be logged, should be passwords | |
148 | # or other crucial credentials |
|
148 | # or other crucial credentials | |
149 | _settings_unsafe_keys = [] |
|
149 | _settings_unsafe_keys = [] | |
150 |
|
150 | |||
151 | def __init__(self, plugin_id): |
|
151 | def __init__(self, plugin_id): | |
152 | self._plugin_id = plugin_id |
|
152 | self._plugin_id = plugin_id | |
153 | self._settings = {} |
|
153 | self._settings = {} | |
154 |
|
154 | |||
155 | def __str__(self): |
|
155 | def __str__(self): | |
156 | return self.get_id() |
|
156 | return self.get_id() | |
157 |
|
157 | |||
158 | def _get_setting_full_name(self, name): |
|
158 | def _get_setting_full_name(self, name): | |
159 | """ |
|
159 | """ | |
160 | Return the full setting name used for storing values in the database. |
|
160 | Return the full setting name used for storing values in the database. | |
161 | """ |
|
161 | """ | |
162 | # TODO: johbo: Using the name here is problematic. It would be good to |
|
162 | # TODO: johbo: Using the name here is problematic. It would be good to | |
163 | # introduce either new models in the database to hold Plugin and |
|
163 | # introduce either new models in the database to hold Plugin and | |
164 | # PluginSetting or to use the plugin id here. |
|
164 | # PluginSetting or to use the plugin id here. | |
165 | return 'auth_{}_{}'.format(self.name, name) |
|
165 | return 'auth_{}_{}'.format(self.name, name) | |
166 |
|
166 | |||
167 | def _get_setting_type(self, name): |
|
167 | def _get_setting_type(self, name): | |
168 | """ |
|
168 | """ | |
169 | Return the type of a setting. This type is defined by the SettingsModel |
|
169 | Return the type of a setting. This type is defined by the SettingsModel | |
170 | and determines how the setting is stored in DB. Optionally the suffix |
|
170 | and determines how the setting is stored in DB. Optionally the suffix | |
171 | `.encrypted` is appended to instruct SettingsModel to store it |
|
171 | `.encrypted` is appended to instruct SettingsModel to store it | |
172 | encrypted. |
|
172 | encrypted. | |
173 | """ |
|
173 | """ | |
174 | schema_node = self.get_settings_schema().get(name) |
|
174 | schema_node = self.get_settings_schema().get(name) | |
175 | db_type = self._settings_type_map.get( |
|
175 | db_type = self._settings_type_map.get( | |
176 | type(schema_node.typ), 'unicode') |
|
176 | type(schema_node.typ), 'unicode') | |
177 | if name in self._settings_encrypted: |
|
177 | if name in self._settings_encrypted: | |
178 | db_type = '{}.encrypted'.format(db_type) |
|
178 | db_type = '{}.encrypted'.format(db_type) | |
179 | return db_type |
|
179 | return db_type | |
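Reviewer note, not part of the diff: a minimal sketch of how _settings_type_map and _settings_encrypted interact when a plugin declares its settings. The schema class and setting name below are made up purely for illustration; only colander.String and the '.encrypted' suffix behaviour come from the code above.

    import colander

    # hypothetical plugin settings schema, for illustration only
    class ExamplePluginSettingsSchema(colander.MappingSchema):
        service_password = colander.SchemaNode(
            colander.String(), missing='', title='Service account password')

    # A plugin listing the node name in _settings_encrypted, e.g.
    #     _settings_encrypted = ['service_password']
    # would get _get_setting_type('service_password') == 'unicode.encrypted',
    # because type(node.typ) is colander.String -> 'unicode' in
    # _settings_type_map, plus the '.encrypted' suffix.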
180 |
|
180 | |||
181 | @classmethod |
|
181 | @classmethod | |
182 | def docs(cls): |
|
182 | def docs(cls): | |
183 | """ |
|
183 | """ | |
184 | Defines a documentation URL which helps with plugin setup |
|
184 | Defines a documentation URL which helps with plugin setup | |
185 | """ |
|
185 | """ | |
186 | return '' |
|
186 | return '' | |
187 |
|
187 | |||
188 | @classmethod |
|
188 | @classmethod | |
189 | def icon(cls): |
|
189 | def icon(cls): | |
190 | """ |
|
190 | """ | |
191 | Defines ICON in SVG format for authentication method |
|
191 | Defines ICON in SVG format for authentication method | |
192 | """ |
|
192 | """ | |
193 | return '' |
|
193 | return '' | |
194 |
|
194 | |||
195 | def is_enabled(self): |
|
195 | def is_enabled(self): | |
196 | """ |
|
196 | """ | |
197 | Returns true if this plugin is enabled. An enabled plugin can be |
|
197 | Returns true if this plugin is enabled. An enabled plugin can be | |
198 | configured in the admin interface but it is not consulted during |
|
198 | configured in the admin interface but it is not consulted during | |
199 | authentication. |
|
199 | authentication. | |
200 | """ |
|
200 | """ | |
201 | auth_plugins = SettingsModel().get_auth_plugins() |
|
201 | auth_plugins = SettingsModel().get_auth_plugins() | |
202 | return self.get_id() in auth_plugins |
|
202 | return self.get_id() in auth_plugins | |
203 |
|
203 | |||
204 | def is_active(self, plugin_cached_settings=None): |
|
204 | def is_active(self, plugin_cached_settings=None): | |
205 | """ |
|
205 | """ | |
206 | Returns true if the plugin is activated. An activated plugin is |
|
206 | Returns true if the plugin is activated. An activated plugin is | |
207 | consulted during authentication, assuming it is also enabled. |
|
207 | consulted during authentication, assuming it is also enabled. | |
208 | """ |
|
208 | """ | |
209 | return self.get_setting_by_name( |
|
209 | return self.get_setting_by_name( | |
210 | 'enabled', plugin_cached_settings=plugin_cached_settings) |
|
210 | 'enabled', plugin_cached_settings=plugin_cached_settings) | |
211 |
|
211 | |||
212 | def get_id(self): |
|
212 | def get_id(self): | |
213 | """ |
|
213 | """ | |
214 | Returns the plugin id. |
|
214 | Returns the plugin id. | |
215 | """ |
|
215 | """ | |
216 | return self._plugin_id |
|
216 | return self._plugin_id | |
217 |
|
217 | |||
218 | def get_display_name(self, load_from_settings=False): |
|
218 | def get_display_name(self, load_from_settings=False): | |
219 | """ |
|
219 | """ | |
220 | Returns a translation string for display purposes. |
|
220 | Returns a translation string for display purposes. | |
221 | If load_from_settings is set, plugin settings can override the display name |
|
221 | If load_from_settings is set, plugin settings can override the display name | |
222 | """ |
|
222 | """ | |
223 | raise NotImplementedError('Not implemented in base class') |
|
223 | raise NotImplementedError('Not implemented in base class') | |
224 |
|
224 | |||
225 | def get_settings_schema(self): |
|
225 | def get_settings_schema(self): | |
226 | """ |
|
226 | """ | |
227 | Returns a colander schema, representing the plugin settings. |
|
227 | Returns a colander schema, representing the plugin settings. | |
228 | """ |
|
228 | """ | |
229 | return AuthnPluginSettingsSchemaBase() |
|
229 | return AuthnPluginSettingsSchemaBase() | |
230 |
|
230 | |||
231 | def _propagate_settings(self, raw_settings): |
|
231 | def _propagate_settings(self, raw_settings): | |
232 | settings = {} |
|
232 | settings = {} | |
233 | for node in self.get_settings_schema(): |
|
233 | for node in self.get_settings_schema(): | |
234 | settings[node.name] = self.get_setting_by_name( |
|
234 | settings[node.name] = self.get_setting_by_name( | |
235 | node.name, plugin_cached_settings=raw_settings) |
|
235 | node.name, plugin_cached_settings=raw_settings) | |
236 | return settings |
|
236 | return settings | |
237 |
|
237 | |||
238 | def get_settings(self, use_cache=True): |
|
238 | def get_settings(self, use_cache=True): | |
239 | """ |
|
239 | """ | |
240 | Returns the plugin settings as dictionary. |
|
240 | Returns the plugin settings as dictionary. | |
241 | """ |
|
241 | """ | |
242 | if self._settings != {} and use_cache: |
|
242 | if self._settings != {} and use_cache: | |
243 | return self._settings |
|
243 | return self._settings | |
244 |
|
244 | |||
245 | raw_settings = SettingsModel().get_all_settings() |
|
245 | raw_settings = SettingsModel().get_all_settings() | |
246 | settings = self._propagate_settings(raw_settings) |
|
246 | settings = self._propagate_settings(raw_settings) | |
247 |
|
247 | |||
248 | self._settings = settings |
|
248 | self._settings = settings | |
249 | return self._settings |
|
249 | return self._settings | |
250 |
|
250 | |||
251 | def get_setting_by_name(self, name, default=None, plugin_cached_settings=None): |
|
251 | def get_setting_by_name(self, name, default=None, plugin_cached_settings=None): | |
252 | """ |
|
252 | """ | |
253 | Returns a plugin setting by name. |
|
253 | Returns a plugin setting by name. | |
254 | """ |
|
254 | """ | |
255 | full_name = 'rhodecode_{}'.format(self._get_setting_full_name(name)) |
|
255 | full_name = 'rhodecode_{}'.format(self._get_setting_full_name(name)) | |
256 | if plugin_cached_settings: |
|
256 | if plugin_cached_settings: | |
257 | plugin_settings = plugin_cached_settings |
|
257 | plugin_settings = plugin_cached_settings | |
258 | else: |
|
258 | else: | |
259 | plugin_settings = SettingsModel().get_all_settings() |
|
259 | plugin_settings = SettingsModel().get_all_settings() | |
260 |
|
260 | |||
261 | if full_name in plugin_settings: |
|
261 | if full_name in plugin_settings: | |
262 | return plugin_settings[full_name] |
|
262 | return plugin_settings[full_name] | |
263 | else: |
|
263 | else: | |
264 | return default |
|
264 | return default | |
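Reviewer note: how the two naming helpers fit together, using assumed values (a plugin named 'ldap' with a setting called 'host'); 'SettingsModel' and 'plugin' refer to the objects in the code above.

    # _get_setting_full_name('host')  -> 'auth_ldap_host'            (DB key)
    # get_setting_by_name('host')     -> looks up 'rhodecode_auth_ldap_host'
    #                                    in SettingsModel().get_all_settings()
    settings = SettingsModel().get_all_settings()
    host = plugin.get_setting_by_name(
        'host', default=None, plugin_cached_settings=settings)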
265 |
|
265 | |||
266 | def create_or_update_setting(self, name, value): |
|
266 | def create_or_update_setting(self, name, value): | |
267 | """ |
|
267 | """ | |
268 | Create or update a setting for this plugin in the persistent storage. |
|
268 | Create or update a setting for this plugin in the persistent storage. | |
269 | """ |
|
269 | """ | |
270 | full_name = self._get_setting_full_name(name) |
|
270 | full_name = self._get_setting_full_name(name) | |
271 | type_ = self._get_setting_type(name) |
|
271 | type_ = self._get_setting_type(name) | |
272 | db_setting = SettingsModel().create_or_update_setting( |
|
272 | db_setting = SettingsModel().create_or_update_setting( | |
273 | full_name, value, type_) |
|
273 | full_name, value, type_) | |
274 | return db_setting.app_settings_value |
|
274 | return db_setting.app_settings_value | |
275 |
|
275 | |||
276 | def log_safe_settings(self, settings): |
|
276 | def log_safe_settings(self, settings): | |
277 | """ |
|
277 | """ | |
278 | Returns a log-safe representation of settings, without any secrets |
|
278 | Returns a log-safe representation of settings, without any secrets | |
279 | """ |
|
279 | """ | |
280 | settings_copy = copy.deepcopy(settings) |
|
280 | settings_copy = copy.deepcopy(settings) | |
281 | for k in self._settings_unsafe_keys: |
|
281 | for k in self._settings_unsafe_keys: | |
282 | if k in settings_copy: |
|
282 | if k in settings_copy: | |
283 | del settings_copy[k] |
|
283 | del settings_copy[k] | |
284 | return settings_copy |
|
284 | return settings_copy | |
285 |
|
285 | |||
286 | @hybrid_property |
|
286 | @hybrid_property | |
287 | def name(self): |
|
287 | def name(self): | |
288 | """ |
|
288 | """ | |
289 | Returns the name of this authentication plugin. |
|
289 | Returns the name of this authentication plugin. | |
290 |
|
290 | |||
291 | :returns: string |
|
291 | :returns: string | |
292 | """ |
|
292 | """ | |
293 | raise NotImplementedError("Not implemented in base class") |
|
293 | raise NotImplementedError("Not implemented in base class") | |
294 |
|
294 | |||
295 | def get_url_slug(self): |
|
295 | def get_url_slug(self): | |
296 | """ |
|
296 | """ | |
297 | Returns a slug which should be used when constructing URLs which refer |
|
297 | Returns a slug which should be used when constructing URLs which refer | |
298 | to this plugin. By default it returns the plugin name. If the name is |
|
298 | to this plugin. By default it returns the plugin name. If the name is | |
299 | not suitable for use in a URL, the plugin should override this |
|
299 | not suitable for use in a URL, the plugin should override this | |
300 | method. |
|
300 | method. | |
301 | """ |
|
301 | """ | |
302 | return self.name |
|
302 | return self.name | |
303 |
|
303 | |||
304 | @property |
|
304 | @property | |
305 | def is_headers_auth(self): |
|
305 | def is_headers_auth(self): | |
306 | """ |
|
306 | """ | |
307 | Returns True if this authentication plugin uses HTTP headers as |
|
307 | Returns True if this authentication plugin uses HTTP headers as | |
308 | authentication method. |
|
308 | authentication method. | |
309 | """ |
|
309 | """ | |
310 | return False |
|
310 | return False | |
311 |
|
311 | |||
312 | @hybrid_property |
|
312 | @hybrid_property | |
313 | def is_container_auth(self): |
|
313 | def is_container_auth(self): | |
314 | """ |
|
314 | """ | |
315 | Deprecated method that indicates if this authentication plugin uses |
|
315 | Deprecated method that indicates if this authentication plugin uses | |
316 | HTTP headers as authentication method. |
|
316 | HTTP headers as authentication method. | |
317 | """ |
|
317 | """ | |
318 | warnings.warn( |
|
318 | warnings.warn( | |
319 | 'Use is_headers_auth instead.', category=DeprecationWarning) |
|
319 | 'Use is_headers_auth instead.', category=DeprecationWarning) | |
320 | return self.is_headers_auth |
|
320 | return self.is_headers_auth | |
321 |
|
321 | |||
322 | @hybrid_property |
|
322 | @hybrid_property | |
323 | def allows_creating_users(self): |
|
323 | def allows_creating_users(self): | |
324 | """ |
|
324 | """ | |
325 | Defines if Plugin allows users to be created on-the-fly when |
|
325 | Defines if Plugin allows users to be created on-the-fly when | |
326 | authentication is called. Controls how external plugins should behave |
|
326 | authentication is called. Controls how external plugins should behave | |
327 | in terms of whether they are allowed to create new users. Base plugins |
|
327 | in terms of whether they are allowed to create new users. Base plugins | |
328 | should not be allowed to, but external ones should be. |
|
328 | should not be allowed to, but external ones should be. | |
329 |
|
329 | |||
330 | :return: bool |
|
330 | :return: bool | |
331 | """ |
|
331 | """ | |
332 | return False |
|
332 | return False | |
333 |
|
333 | |||
334 | def set_auth_type(self, auth_type): |
|
334 | def set_auth_type(self, auth_type): | |
335 | self.auth_type = auth_type |
|
335 | self.auth_type = auth_type | |
336 |
|
336 | |||
337 | def set_calling_scope_repo(self, acl_repo_name): |
|
337 | def set_calling_scope_repo(self, acl_repo_name): | |
338 | self.acl_repo_name = acl_repo_name |
|
338 | self.acl_repo_name = acl_repo_name | |
339 |
|
339 | |||
340 | def allows_authentication_from( |
|
340 | def allows_authentication_from( | |
341 | self, user, allows_non_existing_user=True, |
|
341 | self, user, allows_non_existing_user=True, | |
342 | allowed_auth_plugins=None, allowed_auth_sources=None): |
|
342 | allowed_auth_plugins=None, allowed_auth_sources=None): | |
343 | """ |
|
343 | """ | |
344 | Checks if this authentication module should accept a request for |
|
344 | Checks if this authentication module should accept a request for | |
345 | the current user. |
|
345 | the current user. | |
346 |
|
346 | |||
347 | :param user: user object fetched using plugin's get_user() method. |
|
347 | :param user: user object fetched using plugin's get_user() method. | |
348 | :param allows_non_existing_user: if True, allow the user to be |
|
348 | :param allows_non_existing_user: if True, allow the user to be | |
349 | empty, meaning it does not exist in our database yet |
|
349 | empty, meaning it does not exist in our database yet | |
350 | :param allowed_auth_plugins: if provided, users extern_type will be |
|
350 | :param allowed_auth_plugins: if provided, users extern_type will be | |
351 | checked against a list of provided extern types, which are plugin |
|
351 | checked against a list of provided extern types, which are plugin | |
352 | auth_names in the end |
|
352 | auth_names in the end | |
353 | :param allowed_auth_sources: authentication sources allowed, |
|
353 | :param allowed_auth_sources: authentication sources allowed, | |
354 | `http` or `vcs`; default is both. |
|
354 | `http` or `vcs`; default is both. | |
355 | Defines whether the plugin accepts only http authentication, only |
|
355 | Defines whether the plugin accepts only http authentication, only | |
356 | vcs authentication (git/hg), or both. |
|
356 | vcs authentication (git/hg), or both. | |
357 | :returns: boolean |
|
357 | :returns: boolean | |
358 | """ |
|
358 | """ | |
359 | if not user and not allows_non_existing_user: |
|
359 | if not user and not allows_non_existing_user: | |
360 | log.debug('User is empty but plugin does not allow empty users, ' |
|
360 | log.debug('User is empty but plugin does not allow empty users, ' | |
361 | 'not allowed to authenticate') |
|
361 | 'not allowed to authenticate') | |
362 | return False |
|
362 | return False | |
363 |
|
363 | |||
364 | expected_auth_plugins = allowed_auth_plugins or [self.name] |
|
364 | expected_auth_plugins = allowed_auth_plugins or [self.name] | |
365 | if user and (user.extern_type and |
|
365 | if user and (user.extern_type and | |
366 | user.extern_type not in expected_auth_plugins): |
|
366 | user.extern_type not in expected_auth_plugins): | |
367 | log.debug( |
|
367 | log.debug( | |
368 | 'User `%s` is bound to `%s` auth type. Plugin allows only ' |
|
368 | 'User `%s` is bound to `%s` auth type. Plugin allows only ' | |
369 | '%s, skipping', user, user.extern_type, expected_auth_plugins) |
|
369 | '%s, skipping', user, user.extern_type, expected_auth_plugins) | |
370 |
|
370 | |||
371 | return False |
|
371 | return False | |
372 |
|
372 | |||
373 | # by default accept both |
|
373 | # by default accept both | |
374 | expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE] |
|
374 | expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE] | |
375 | if self.auth_type not in expected_auth_from: |
|
375 | if self.auth_type not in expected_auth_from: | |
376 | log.debug('Current auth source is %s but plugin only allows %s', |
|
376 | log.debug('Current auth source is %s but plugin only allows %s', | |
377 | self.auth_type, expected_auth_from) |
|
377 | self.auth_type, expected_auth_from) | |
378 | return False |
|
378 | return False | |
379 |
|
379 | |||
380 | return True |
|
380 | return True | |
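Reviewer note: a usage sketch of the two checks above, with assumed 'plugin' and 'user' objects; HTTP_TYPE and VCS_TYPE are the module constants referenced in the code.

    plugin.set_auth_type(VCS_TYPE)

    # a user bound to a different extern_type than this plugin is rejected
    accepted = plugin.allows_authentication_from(
        user, allowed_auth_plugins=[plugin.name])

    # restricting the allowed sources to HTTP makes a VCS request fail
    # the second check above
    accepted = plugin.allows_authentication_from(
        user, allowed_auth_sources=[HTTP_TYPE])  # False while auth_type is VCS_TYPE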
381 |
|
381 | |||
382 | def get_user(self, username=None, **kwargs): |
|
382 | def get_user(self, username=None, **kwargs): | |
383 | """ |
|
383 | """ | |
384 | Helper method for fetching users in plugins. By default it does a |
|
384 | Helper method for fetching users in plugins. By default it does a | |
385 | simple fetch by username, but this method can be customized in plugins, |
|
385 | simple fetch by username, but this method can be customized in plugins, | |
386 | e.g. the headers auth plugin fetches the user from environ params |
|
386 | e.g. the headers auth plugin fetches the user from environ params | |
387 |
|
387 | |||
388 | :param username: username if given to fetch from database |
|
388 | :param username: username if given to fetch from database | |
389 | :param kwargs: extra arguments needed for user fetching. |
|
389 | :param kwargs: extra arguments needed for user fetching. | |
390 | """ |
|
390 | """ | |
391 | user = None |
|
391 | user = None | |
392 | log.debug( |
|
392 | log.debug( | |
393 | 'Trying to fetch user `%s` from RhodeCode database', username) |
|
393 | 'Trying to fetch user `%s` from RhodeCode database', username) | |
394 | if username: |
|
394 | if username: | |
395 | user = User.get_by_username(username) |
|
395 | user = User.get_by_username(username) | |
396 | if not user: |
|
396 | if not user: | |
397 | log.debug('User not found, fallback to fetch user in ' |
|
397 | log.debug('User not found, fallback to fetch user in ' | |
398 | 'case insensitive mode') |
|
398 | 'case insensitive mode') | |
399 | user = User.get_by_username(username, case_insensitive=True) |
|
399 | user = User.get_by_username(username, case_insensitive=True) | |
400 | else: |
|
400 | else: | |
401 | log.debug('provided username:`%s` is empty, skipping...', username) |
|
401 | log.debug('provided username:`%s` is empty, skipping...', username) | |
402 | if not user: |
|
402 | if not user: | |
403 | log.debug('User `%s` not found in database', username) |
|
403 | log.debug('User `%s` not found in database', username) | |
404 | else: |
|
404 | else: | |
405 | log.debug('Got DB user:%s', user) |
|
405 | log.debug('Got DB user:%s', user) | |
406 | return user |
|
406 | return user | |
407 |
|
407 | |||
408 | def user_activation_state(self): |
|
408 | def user_activation_state(self): | |
409 | """ |
|
409 | """ | |
410 | Defines user activation state when creating new users |
|
410 | Defines user activation state when creating new users | |
411 |
|
411 | |||
412 | :returns: boolean |
|
412 | :returns: boolean | |
413 | """ |
|
413 | """ | |
414 | raise NotImplementedError("Not implemented in base class") |
|
414 | raise NotImplementedError("Not implemented in base class") | |
415 |
|
415 | |||
416 | def auth(self, userobj, username, passwd, settings, **kwargs): |
|
416 | def auth(self, userobj, username, passwd, settings, **kwargs): | |
417 | """ |
|
417 | """ | |
418 | Given a user object (which may be null), username, a plaintext |
|
418 | Given a user object (which may be null), username, a plaintext | |
419 | password, and a settings object (containing all the keys needed as |
|
419 | password, and a settings object (containing all the keys needed as | |
420 | listed in settings()), authenticate this user's login attempt. |
|
420 | listed in settings()), authenticate this user's login attempt. | |
421 |
|
421 | |||
422 | Return None on failure. On success, return a dictionary of the form: |
|
422 | Return None on failure. On success, return a dictionary of the form: | |
423 |
|
423 | |||
424 | see: RhodeCodeAuthPluginBase.auth_func_attrs |
|
424 | see: RhodeCodeAuthPluginBase.auth_func_attrs | |
425 | This is later validated for correctness |
|
425 | This is later validated for correctness | |
426 | """ |
|
426 | """ | |
427 | raise NotImplementedError("not implemented in base class") |
|
427 | raise NotImplementedError("not implemented in base class") | |
428 |
|
428 | |||
429 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
429 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): | |
430 | """ |
|
430 | """ | |
431 | Wrapper to call self.auth() that validates call on it |
|
431 | Wrapper to call self.auth() that validates call on it | |
432 |
|
432 | |||
433 | :param userobj: userobj |
|
433 | :param userobj: userobj | |
434 | :param username: username |
|
434 | :param username: username | |
435 | :param passwd: plaintext password |
|
435 | :param passwd: plaintext password | |
436 | :param settings: plugin settings |
|
436 | :param settings: plugin settings | |
437 | """ |
|
437 | """ | |
438 | auth = self.auth(userobj, username, passwd, settings, **kwargs) |
|
438 | auth = self.auth(userobj, username, passwd, settings, **kwargs) | |
439 | if auth: |
|
439 | if auth: | |
440 | auth['_plugin'] = self.name |
|
440 | auth['_plugin'] = self.name | |
441 | auth['_ttl_cache'] = self.get_ttl_cache(settings) |
|
441 | auth['_ttl_cache'] = self.get_ttl_cache(settings) | |
442 | # check if hash should be migrated ? |
|
442 | # check if hash should be migrated ? | |
443 | new_hash = auth.get('_hash_migrate') |
|
443 | new_hash = auth.get('_hash_migrate') | |
444 | if new_hash: |
|
444 | if new_hash: | |
445 | self._migrate_hash_to_bcrypt(username, passwd, new_hash) |
|
445 | self._migrate_hash_to_bcrypt(username, passwd, new_hash) | |
446 | if 'user_group_sync' not in auth: |
|
446 | if 'user_group_sync' not in auth: | |
447 | auth['user_group_sync'] = False |
|
447 | auth['user_group_sync'] = False | |
448 | return self._validate_auth_return(auth) |
|
448 | return self._validate_auth_return(auth) | |
449 | return auth |
|
449 | return auth | |
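Reviewer note: a hedged sketch of what a concrete plugin's auth() is expected to return. The key set below follows the auth_func_attrs documented earlier in this class (and is validated by _validate_auth_return()); external_service_check is a made-up placeholder for the plugin's own credential verification.

    def auth(self, userobj, username, passwd, settings, **kwargs):
        # hypothetical credential verification against an external service
        if not external_service_check(username, passwd, settings):
            return None
        return {
            'username': username,
            'firstname': '',
            'lastname': '',
            'groups': [],
            'user_group_sync': False,
            'email': '',
            'admin': False,
            'active': True,
            'active_from_extern': None,  # let RhodeCode decide activation
            'extern_name': username,
        }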
450 |
|
450 | |||
451 | def _migrate_hash_to_bcrypt(self, username, password, new_hash): |
|
451 | def _migrate_hash_to_bcrypt(self, username, password, new_hash): | |
452 | new_hash_cypher = _RhodeCodeCryptoBCrypt() |
|
452 | new_hash_cypher = _RhodeCodeCryptoBCrypt() | |
453 | # extra checks, so make sure new hash is correct. |
|
453 | # extra checks, so make sure new hash is correct. | |
454 | password_encoded = safe_str(password) |
|
454 | password_encoded = safe_str(password) | |
455 | if new_hash and new_hash_cypher.hash_check( |
|
455 | if new_hash and new_hash_cypher.hash_check( | |
456 | password_encoded, new_hash): |
|
456 | password_encoded, new_hash): | |
457 | cur_user = User.get_by_username(username) |
|
457 | cur_user = User.get_by_username(username) | |
458 | cur_user.password = new_hash |
|
458 | cur_user.password = new_hash | |
459 | Session().add(cur_user) |
|
459 | Session().add(cur_user) | |
460 | Session().flush() |
|
460 | Session().flush() | |
461 | log.info('Migrated user %s hash to bcrypt', cur_user) |
|
461 | log.info('Migrated user %s hash to bcrypt', cur_user) | |
462 |
|
462 | |||
463 | def _validate_auth_return(self, ret): |
|
463 | def _validate_auth_return(self, ret): | |
464 | if not isinstance(ret, dict): |
|
464 | if not isinstance(ret, dict): | |
465 | raise Exception('returned value from auth must be a dict') |
|
465 | raise Exception('returned value from auth must be a dict') | |
466 | for k in self.auth_func_attrs: |
|
466 | for k in self.auth_func_attrs: | |
467 | if k not in ret: |
|
467 | if k not in ret: | |
468 | raise Exception('Missing %s attribute from returned data' % k) |
|
468 | raise Exception('Missing %s attribute from returned data' % k) | |
469 | return ret |
|
469 | return ret | |
470 |
|
470 | |||
471 | def get_ttl_cache(self, settings=None): |
|
471 | def get_ttl_cache(self, settings=None): | |
472 | plugin_settings = settings or self.get_settings() |
|
472 | plugin_settings = settings or self.get_settings() | |
473 | # the default is 30; we make a compromise here of |
|
473 | # the default is 30; we make a compromise here of | |
474 | # performance > security, mostly due to LDAP/SVN, since the majority |
|
474 | # performance > security, mostly due to LDAP/SVN, since the majority | |
475 | # of users want cache_ttl to be enabled |
|
475 | # of users want cache_ttl to be enabled | |
476 | from rhodecode.authentication import plugin_default_auth_ttl |
|
476 | from rhodecode.authentication import plugin_default_auth_ttl | |
477 | cache_ttl = plugin_default_auth_ttl |
|
477 | cache_ttl = plugin_default_auth_ttl | |
478 |
|
478 | |||
479 | if isinstance(self.AUTH_CACHE_TTL, (int, long)): |
|
479 | if isinstance(self.AUTH_CACHE_TTL, (int, long)): | |
480 | # plugin cache set inside is more important than the settings value |
|
480 | # plugin cache set inside is more important than the settings value | |
481 | cache_ttl = self.AUTH_CACHE_TTL |
|
481 | cache_ttl = self.AUTH_CACHE_TTL | |
482 | elif plugin_settings.get('cache_ttl'): |
|
482 | elif plugin_settings.get('cache_ttl'): | |
483 | cache_ttl = safe_int(plugin_settings.get('cache_ttl'), 0) |
|
483 | cache_ttl = safe_int(plugin_settings.get('cache_ttl'), 0) | |
484 |
|
484 | |||
485 | plugin_cache_active = bool(cache_ttl and cache_ttl > 0) |
|
485 | plugin_cache_active = bool(cache_ttl and cache_ttl > 0) | |
486 | return plugin_cache_active, cache_ttl |
|
486 | return plugin_cache_active, cache_ttl | |
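Reviewer note: a small sketch of the precedence implemented above (the numeric values are examples only): a class-level AUTH_CACHE_TTL wins over the 'cache_ttl' setting, which wins over plugin_default_auth_ttl.

    class ExamplePlugin(RhodeCodeAuthPluginBase):
        AUTH_CACHE_TTL = 60  # hard-coded TTL in seconds, overrides settings

    # with the class attribute set:             get_ttl_cache() -> (True, 60)
    # without it but a cache_ttl setting of 30: get_ttl_cache() -> (True, 30)
    # with neither, plugin_default_auth_ttl from the package init applies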
487 |
|
487 | |||
488 |
|
488 | |||
489 | class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase): |
|
489 | class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase): | |
490 |
|
490 | |||
491 | @hybrid_property |
|
491 | @hybrid_property | |
492 | def allows_creating_users(self): |
|
492 | def allows_creating_users(self): | |
493 | return True |
|
493 | return True | |
494 |
|
494 | |||
495 | def use_fake_password(self): |
|
495 | def use_fake_password(self): | |
496 | """ |
|
496 | """ | |
497 | Return a boolean that indicates whether or not we should set the user's |
|
497 | Return a boolean that indicates whether or not we should set the user's | |
498 | password to a random value when it is authenticated by this plugin. |
|
498 | password to a random value when it is authenticated by this plugin. | |
499 | If your plugin provides authentication, then you will generally |
|
499 | If your plugin provides authentication, then you will generally | |
500 | want this. |
|
500 | want this. | |
501 |
|
501 | |||
502 | :returns: boolean |
|
502 | :returns: boolean | |
503 | """ |
|
503 | """ | |
504 | raise NotImplementedError("Not implemented in base class") |
|
504 | raise NotImplementedError("Not implemented in base class") | |
505 |
|
505 | |||
506 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
506 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): | |
507 | # at this point _authenticate calls plugin's `auth()` function |
|
507 | # at this point _authenticate calls plugin's `auth()` function | |
508 | auth = super(RhodeCodeExternalAuthPlugin, self)._authenticate( |
|
508 | auth = super(RhodeCodeExternalAuthPlugin, self)._authenticate( | |
509 | userobj, username, passwd, settings, **kwargs) |
|
509 | userobj, username, passwd, settings, **kwargs) | |
510 |
|
510 | |||
511 | if auth: |
|
511 | if auth: | |
512 | # the plugin may have cleaned up the username, |
|
512 | # the plugin may have cleaned up the username, | |
513 | # so we should use the returned value |
|
513 | # so we should use the returned value | |
514 | username = auth['username'] |
|
514 | username = auth['username'] | |
515 |
|
515 | |||
516 | # if the external source tells us that the user is not active, we should |
|
516 | # if the external source tells us that the user is not active, we should | |
517 | # skip the rest of the process. This prevents creating users in |
|
517 | # skip the rest of the process. This prevents creating users in | |
518 | # RhodeCode when using external authentication; if it's an |
|
518 | # RhodeCode when using external authentication; if it's an | |
519 | # inactive user we shouldn't create that user anyway |
|
519 | # inactive user we shouldn't create that user anyway | |
520 | if auth['active_from_extern'] is False: |
|
520 | if auth['active_from_extern'] is False: | |
521 | log.warning( |
|
521 | log.warning( | |
522 | "User %s authenticated against %s, but is inactive", |
|
522 | "User %s authenticated against %s, but is inactive", | |
523 | username, self.__module__) |
|
523 | username, self.__module__) | |
524 | return None |
|
524 | return None | |
525 |
|
525 | |||
526 | cur_user = User.get_by_username(username, case_insensitive=True) |
|
526 | cur_user = User.get_by_username(username, case_insensitive=True) | |
527 | is_user_existing = cur_user is not None |
|
527 | is_user_existing = cur_user is not None | |
528 |
|
528 | |||
529 | if is_user_existing: |
|
529 | if is_user_existing: | |
530 | log.debug('Syncing user `%s` from ' |
|
530 | log.debug('Syncing user `%s` from ' | |
531 | '`%s` plugin', username, self.name) |
|
531 | '`%s` plugin', username, self.name) | |
532 | else: |
|
532 | else: | |
533 | log.debug('Creating non existing user `%s` from ' |
|
533 | log.debug('Creating non existing user `%s` from ' | |
534 | '`%s` plugin', username, self.name) |
|
534 | '`%s` plugin', username, self.name) | |
535 |
|
535 | |||
536 | if self.allows_creating_users: |
|
536 | if self.allows_creating_users: | |
537 | log.debug('Plugin `%s` allows to ' |
|
537 | log.debug('Plugin `%s` allows to ' | |
538 | 'create new users', self.name) |
|
538 | 'create new users', self.name) | |
539 | else: |
|
539 | else: | |
540 | log.debug('Plugin `%s` does not allow to ' |
|
540 | log.debug('Plugin `%s` does not allow to ' | |
541 | 'create new users', self.name) |
|
541 | 'create new users', self.name) | |
542 |
|
542 | |||
543 | user_parameters = { |
|
543 | user_parameters = { | |
544 | 'username': username, |
|
544 | 'username': username, | |
545 | 'email': auth["email"], |
|
545 | 'email': auth["email"], | |
546 | 'firstname': auth["firstname"], |
|
546 | 'firstname': auth["firstname"], | |
547 | 'lastname': auth["lastname"], |
|
547 | 'lastname': auth["lastname"], | |
548 | 'active': auth["active"], |
|
548 | 'active': auth["active"], | |
549 | 'admin': auth["admin"], |
|
549 | 'admin': auth["admin"], | |
550 | 'extern_name': auth["extern_name"], |
|
550 | 'extern_name': auth["extern_name"], | |
551 | 'extern_type': self.name, |
|
551 | 'extern_type': self.name, | |
552 | 'plugin': self, |
|
552 | 'plugin': self, | |
553 | 'allow_to_create_user': self.allows_creating_users, |
|
553 | 'allow_to_create_user': self.allows_creating_users, | |
554 | } |
|
554 | } | |
555 |
|
555 | |||
556 | if not is_user_existing: |
|
556 | if not is_user_existing: | |
557 | if self.use_fake_password(): |
|
557 | if self.use_fake_password(): | |
558 | # Randomize the PW because we don't need it, but don't want |
|
558 | # Randomize the PW because we don't need it, but don't want | |
559 | # it blank either |
|
559 | # it blank either | |
560 | passwd = PasswordGenerator().gen_password(length=16) |
|
560 | passwd = PasswordGenerator().gen_password(length=16) | |
561 | user_parameters['password'] = passwd |
|
561 | user_parameters['password'] = passwd | |
562 | else: |
|
562 | else: | |
563 | # Since the password is required by create_or_update method of |
|
563 | # Since the password is required by create_or_update method of | |
564 | # UserModel, we need to set it explicitly. |
|
564 | # UserModel, we need to set it explicitly. | |
565 | # The create_or_update method is smart and recognises the |
|
565 | # The create_or_update method is smart and recognises the | |
566 | # password hashes as well. |
|
566 | # password hashes as well. | |
567 | user_parameters['password'] = cur_user.password |
|
567 | user_parameters['password'] = cur_user.password | |
568 |
|
568 | |||
569 | # we either create or update users, we also pass the flag |
|
569 | # we either create or update users, we also pass the flag | |
570 | # that controls if this method can actually do that. |
|
570 | # that controls if this method can actually do that. | |
571 | # raises NotAllowedToCreateUserError if it cannot, and we try to. |
|
571 | # raises NotAllowedToCreateUserError if it cannot, and we try to. | |
572 | user = UserModel().create_or_update(**user_parameters) |
|
572 | user = UserModel().create_or_update(**user_parameters) | |
573 | Session().flush() |
|
573 | Session().flush() | |
574 | # enforce that the user is only in the given groups, all of which have |
|
574 | # enforce that the user is only in the given groups, all of which have | |
575 | # to be ones created from plugins. We store this info in the |
|
575 | # to be ones created from plugins. We store this info in the | |
576 | # _group_data JSON field |
|
576 | # _group_data JSON field | |
577 |
|
577 | |||
578 | if auth['user_group_sync']: |
|
578 | if auth['user_group_sync']: | |
579 | try: |
|
579 | try: | |
580 | groups = auth['groups'] or [] |
|
580 | groups = auth['groups'] or [] | |
581 | log.debug( |
|
581 | log.debug( | |
582 | 'Performing user_group sync based on set `%s` ' |
|
582 | 'Performing user_group sync based on set `%s` ' | |
583 | 'returned by `%s` plugin', groups, self.name) |
|
583 | 'returned by `%s` plugin', groups, self.name) | |
584 | UserGroupModel().enforce_groups(user, groups, self.name) |
|
584 | UserGroupModel().enforce_groups(user, groups, self.name) | |
585 | except Exception: |
|
585 | except Exception: | |
586 | # if group syncing fails for any reason, we should |
|
586 | # if group syncing fails for any reason, we should | |
587 | # still proceed with the login |
|
587 | # still proceed with the login | |
588 | log.error(traceback.format_exc()) |
|
588 | log.error(traceback.format_exc()) | |
589 |
|
589 | |||
590 | Session().commit() |
|
590 | Session().commit() | |
591 | return auth |
|
591 | return auth | |
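Reviewer note: a condensed view of the sync contract above, with assumed return values. When an external plugin returns 'groups' and sets 'user_group_sync', the local user is created or updated and its group membership is enforced from the plugin data.

    # assumed auth() result from an external plugin
    auth = {
        'username': 'jane', 'email': 'jane@example.com',
        'firstname': 'Jane', 'lastname': 'Doe',
        'active': True, 'active_from_extern': True, 'admin': False,
        'extern_name': 'jane', 'groups': ['developers'],
        'user_group_sync': True,
    }
    # _authenticate() would then call UserModel().create_or_update(...) with
    # extern_type set to the plugin name, and
    # UserGroupModel().enforce_groups(user, ['developers'], plugin.name)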
592 |
|
592 | |||
593 |
|
593 | |||
594 | class AuthLdapBase(object): |
|
594 | class AuthLdapBase(object): | |
595 |
|
595 | |||
596 | @classmethod |
|
596 | @classmethod | |
597 | def _build_servers(cls, ldap_server_type, ldap_server, port, use_resolver=True): |
|
597 | def _build_servers(cls, ldap_server_type, ldap_server, port, use_resolver=True): | |
598 |
|
598 | |||
599 | def host_resolver(host, port, full_resolve=True): |
|
599 | def host_resolver(host, port, full_resolve=True): | |
600 | """ |
|
600 | """ | |
601 | The main job of this function is to prevent ldap connection issues |
|
601 | The main job of this function is to prevent ldap connection issues | |
602 | and detect them early using "greenified" sockets |
|
602 | and detect them early using "greenified" sockets | |
603 | """ |
|
603 | """ | |
604 | host = host.strip() |
|
604 | host = host.strip() | |
605 | if not full_resolve: |
|
605 | if not full_resolve: | |
606 | return '{}:{}'.format(host, port) |
|
606 | return '{}:{}'.format(host, port) | |
607 |
|
607 | |||
608 | log.debug('LDAP: Resolving IP for LDAP host `%s`', host) |
|
608 | log.debug('LDAP: Resolving IP for LDAP host `%s`', host) | |
609 | try: |
|
609 | try: | |
610 | ip = socket.gethostbyname(host) |
|
610 | ip = socket.gethostbyname(host) | |
611 | log.debug('LDAP: Got LDAP host `%s` ip %s', host, ip) |
|
611 | log.debug('LDAP: Got LDAP host `%s` ip %s', host, ip) | |
612 | except Exception: |
|
612 | except Exception: | |
613 | raise LdapConnectionError('Failed to resolve host: `{}`'.format(host)) |
|
613 | raise LdapConnectionError('Failed to resolve host: `{}`'.format(host)) | |
614 |
|
614 | |||
615 | log.debug('LDAP: Checking if IP %s is accessible', ip) |
|
615 | log.debug('LDAP: Checking if IP %s is accessible', ip) | |
616 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
|
616 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | |
617 | try: |
|
617 | try: | |
618 | s.connect((ip, int(port))) |
|
618 | s.connect((ip, int(port))) | |
619 | s.shutdown(socket.SHUT_RD) |
|
619 | s.shutdown(socket.SHUT_RD) | |
620 | log.debug('LDAP: connection to %s successful', ip) |
|
620 | log.debug('LDAP: connection to %s successful', ip) | |
621 | except Exception: |
|
621 | except Exception: | |
622 | raise LdapConnectionError( |
|
622 | raise LdapConnectionError( | |
623 | 'Failed to connect to host: `{}:{}`'.format(host, port)) |
|
623 | 'Failed to connect to host: `{}:{}`'.format(host, port)) | |
624 |
|
624 | |||
625 | return '{}:{}'.format(host, port) |
|
625 | return '{}:{}'.format(host, port) | |
626 |
|
626 | |||
627 | if len(ldap_server) == 1: |
|
627 | if len(ldap_server) == 1: | |
628 | # in case of single server use resolver to detect potential |
|
628 | # in case of single server use resolver to detect potential | |
629 | # connection issues |
|
629 | # connection issues | |
630 | full_resolve = True |
|
630 | full_resolve = True | |
631 | else: |
|
631 | else: | |
632 | full_resolve = False |
|
632 | full_resolve = False | |
633 |
|
633 | |||
634 | return ', '.join( |
|
634 | return ', '.join( | |
635 | ["{}://{}".format( |
|
635 | ["{}://{}".format( | |
636 | ldap_server_type, |
|
636 | ldap_server_type, | |
637 | host_resolver(host, port, full_resolve=use_resolver and full_resolve)) |
|
637 | host_resolver(host, port, full_resolve=use_resolver and full_resolve)) | |
638 | for host in ldap_server]) |
|
638 | for host in ldap_server]) | |
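Reviewer note: a worked example of the helper above (hostnames are placeholders). With multiple servers only string formatting happens; with a single server and use_resolver enabled, host_resolver() additionally resolves and probes the host before returning.

    # AuthLdapBase._build_servers(
    #     'ldap', ['ldap1.example.com', 'ldap2.example.com'], 389)
    # -> 'ldap://ldap1.example.com:389, ldap://ldap2.example.com:389'
    #
    # AuthLdapBase._build_servers('ldaps', ['ldap.example.com'], 636)
    # -> resolves the IP, opens a TCP probe to port 636, then returns
    #    'ldaps://ldap.example.com:636' (or raises LdapConnectionError)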
639 |
|
639 | |||
640 | @classmethod |
|
640 | @classmethod | |
641 | def _get_server_list(cls, servers): |
|
641 | def _get_server_list(cls, servers): | |
642 | return map(string.strip, servers.split(',')) |
|
642 | return map(string.strip, servers.split(',')) | |
643 |
|
643 | |||
644 | @classmethod |
|
644 | @classmethod | |
645 | def get_uid(cls, username, server_addresses): |
|
645 | def get_uid(cls, username, server_addresses): | |
646 | uid = username |
|
646 | uid = username | |
647 | for server_addr in server_addresses: |
|
647 | for server_addr in server_addresses: | |
648 | uid = chop_at(username, "@%s" % server_addr) |
|
648 | uid = chop_at(username, "@%s" % server_addr) | |
649 | return uid |
|
649 | return uid | |
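Reviewer note: a quick illustration of get_uid() together with chop_at() (defined near the end of this file), using placeholder values:

    # AuthLdapBase.get_uid('john@corp.example.com', ['corp.example.com'])
    # -> 'john'    (the '@server' suffix is chopped off)
    # AuthLdapBase.get_uid('john', ['corp.example.com'])
    # -> 'john'    (no suffix present, the username is returned unchanged)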
650 |
|
650 | |||
651 | @classmethod |
|
651 | @classmethod | |
652 | def validate_username(cls, username): |
|
652 | def validate_username(cls, username): | |
653 | if "," in username: |
|
653 | if "," in username: | |
654 | raise LdapUsernameError( |
|
654 | raise LdapUsernameError( | |
655 | "invalid character `,` in username: `{}`".format(username)) |
|
655 | "invalid character `,` in username: `{}`".format(username)) | |
656 |
|
656 | |||
657 | @classmethod |
|
657 | @classmethod | |
658 | def validate_password(cls, username, password): |
|
658 | def validate_password(cls, username, password): | |
659 | if not password: |
|
659 | if not password: | |
660 | msg = "Authenticating user %s with blank password not allowed" |
|
660 | msg = "Authenticating user %s with blank password not allowed" | |
661 | log.warning(msg, username) |
|
661 | log.warning(msg, username) | |
662 | raise LdapPasswordError(msg) |
|
662 | raise LdapPasswordError(msg) | |
663 |
|
663 | |||
664 |
|
664 | |||
665 | def loadplugin(plugin_id): |
|
665 | def loadplugin(plugin_id): | |
666 | """ |
|
666 | """ | |
667 | Loads and returns an instantiated authentication plugin. |
|
667 | Loads and returns an instantiated authentication plugin. | |
668 | Returns the RhodeCodeAuthPluginBase subclass on success, |
|
668 | Returns the RhodeCodeAuthPluginBase subclass on success, | |
669 | or None on failure. |
|
669 | or None on failure. | |
670 | """ |
|
670 | """ | |
671 | # TODO: Stop using pyramid's thread locals to retrieve the registry. |
|
671 | # TODO: Stop using pyramid's thread locals to retrieve the registry. | |
672 | authn_registry = get_authn_registry() |
|
672 | authn_registry = get_authn_registry() | |
673 | plugin = authn_registry.get_plugin(plugin_id) |
|
673 | plugin = authn_registry.get_plugin(plugin_id) | |
674 | if plugin is None: |
|
674 | if plugin is None: | |
675 | log.error('Authentication plugin not found: "%s"', plugin_id) |
|
675 | log.error('Authentication plugin not found: "%s"', plugin_id) | |
676 | return plugin |
|
676 | return plugin | |
677 |
|
677 | |||
678 |
|
678 | |||
679 | def get_authn_registry(registry=None): |
|
679 | def get_authn_registry(registry=None): | |
680 | registry = registry or get_current_registry() |
|
680 | registry = registry or get_current_registry() | |
681 | authn_registry = registry.queryUtility(IAuthnPluginRegistry) |
|
681 | authn_registry = registry.queryUtility(IAuthnPluginRegistry) | |
682 | return authn_registry |
|
682 | return authn_registry | |
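Reviewer note: a usage sketch for loadplugin(); the plugin id string below is only a guess at the format, real ids depend on how plugins are registered in the authentication registry.

    plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')  # id format assumed
    if plugin is None:
        # not registered in the authentication registry; handle the None return
        log.error('cannot continue without the requested auth plugin')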
683 |
|
683 | |||
684 |
|
684 | |||
685 | def authenticate(username, password, environ=None, auth_type=None, |
|
685 | def authenticate(username, password, environ=None, auth_type=None, | |
686 | skip_missing=False, registry=None, acl_repo_name=None): |
|
686 | skip_missing=False, registry=None, acl_repo_name=None): | |
687 | """ |
|
687 | """ | |
688 | Authentication function used for access control, |
|
688 | Authentication function used for access control, | |
689 | It tries to authenticate based on enabled authentication modules. |
|
689 | It tries to authenticate based on enabled authentication modules. | |
690 |
|
690 | |||
691 | :param username: username can be empty for headers auth |
|
691 | :param username: username can be empty for headers auth | |
692 | :param password: password can be empty for headers auth |
|
692 | :param password: password can be empty for headers auth | |
693 | :param environ: environ headers passed for headers auth |
|
693 | :param environ: environ headers passed for headers auth | |
694 | :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE` |
|
694 | :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE` | |
695 | :param skip_missing: ignores plugins that are in db but not in environment |
|
695 | :param skip_missing: ignores plugins that are in db but not in environment | |
696 | :returns: None if auth failed, plugin_user dict if auth is correct |
|
696 | :returns: None if auth failed, plugin_user dict if auth is correct | |
697 | """ |
|
697 | """ | |
698 | if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]: |
|
698 | if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]: | |
699 | raise ValueError('auth type must be one of http, vcs, got "%s" instead' |
|
699 | raise ValueError('auth type must be one of http, vcs, got "%s" instead' | |
700 | % auth_type) |
|
700 | % auth_type) | |
701 | headers_only = environ and not (username and password) |
|
701 | headers_only = environ and not (username and password) | |
702 |
|
702 | |||
703 | authn_registry = get_authn_registry(registry) |
|
703 | authn_registry = get_authn_registry(registry) | |
704 |
|
704 | |||
705 | plugins_to_check = authn_registry.get_plugins_for_authentication() |
|
705 | plugins_to_check = authn_registry.get_plugins_for_authentication() | |
706 | log.debug('Starting ordered authentication chain using %s plugins', |
|
706 | log.debug('Starting ordered authentication chain using %s plugins', | |
707 | [x.name for x in plugins_to_check]) |
|
707 | [x.name for x in plugins_to_check]) | |
708 | for plugin in plugins_to_check: |
|
708 | for plugin in plugins_to_check: | |
709 | plugin.set_auth_type(auth_type) |
|
709 | plugin.set_auth_type(auth_type) | |
710 | plugin.set_calling_scope_repo(acl_repo_name) |
|
710 | plugin.set_calling_scope_repo(acl_repo_name) | |
711 |
|
711 | |||
712 | if headers_only and not plugin.is_headers_auth: |
|
712 | if headers_only and not plugin.is_headers_auth: | |
713 | log.debug('Auth type is for headers only and plugin `%s` is not ' |
|
713 | log.debug('Auth type is for headers only and plugin `%s` is not ' | |
714 | 'headers plugin, skipping...', plugin.get_id()) |
|
714 | 'headers plugin, skipping...', plugin.get_id()) | |
715 | continue |
|
715 | continue | |
716 |
|
716 | |||
717 | log.debug('Trying authentication using ** %s **', plugin.get_id()) |
|
717 | log.debug('Trying authentication using ** %s **', plugin.get_id()) | |
718 |
|
718 | |||
719 | # load plugin settings from RhodeCode database |
|
719 | # load plugin settings from RhodeCode database | |
720 | plugin_settings = plugin.get_settings() |
|
720 | plugin_settings = plugin.get_settings() | |
721 | plugin_sanitized_settings = plugin.log_safe_settings(plugin_settings) |
|
721 | plugin_sanitized_settings = plugin.log_safe_settings(plugin_settings) | |
722 | log.debug('Plugin `%s` settings:%s', plugin.get_id(), plugin_sanitized_settings) |
|
722 | log.debug('Plugin `%s` settings:%s', plugin.get_id(), plugin_sanitized_settings) | |
723 |
|
723 | |||
724 | # use plugin's method of user extraction. |
|
724 | # use plugin's method of user extraction. | |
725 | user = plugin.get_user(username, environ=environ, |
|
725 | user = plugin.get_user(username, environ=environ, | |
726 | settings=plugin_settings) |
|
726 | settings=plugin_settings) | |
727 | display_user = user.username if user else username |
|
727 | display_user = user.username if user else username | |
728 | log.debug( |
|
728 | log.debug( | |
729 | 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user) |
|
729 | 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user) | |
730 |
|
730 | |||
731 | if not plugin.allows_authentication_from(user): |
|
731 | if not plugin.allows_authentication_from(user): | |
732 | log.debug('Plugin %s does not accept user `%s` for authentication', |
|
732 | log.debug('Plugin %s does not accept user `%s` for authentication', | |
733 | plugin.get_id(), display_user) |
|
733 | plugin.get_id(), display_user) | |
734 | continue |
|
734 | continue | |
735 | else: |
|
735 | else: | |
736 | log.debug('Plugin %s accepted user `%s` for authentication', |
|
736 | log.debug('Plugin %s accepted user `%s` for authentication', | |
737 | plugin.get_id(), display_user) |
|
737 | plugin.get_id(), display_user) | |
738 |
|
738 | |||
739 | log.info('Authenticating user `%s` using %s plugin', |
|
739 | log.info('Authenticating user `%s` using %s plugin', | |
740 | display_user, plugin.get_id()) |
|
740 | display_user, plugin.get_id()) | |
741 |
|
741 | |||
742 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) |
|
742 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) | |
743 |
|
743 | |||
744 | log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)', |
|
744 | log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)', | |
745 | plugin.get_id(), plugin_cache_active, cache_ttl) |
|
745 | plugin.get_id(), plugin_cache_active, cache_ttl) | |
746 |
|
746 | |||
747 | user_id = user.user_id if user else 'no-user' |
|
747 | user_id = user.user_id if user else 'no-user' | |
748 | # don't cache for empty users |
|
748 | # don't cache for empty users | |
749 | plugin_cache_active = plugin_cache_active and user_id |
|
749 | plugin_cache_active = plugin_cache_active and user_id | |
750 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) |
|
750 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) | |
751 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) |
|
751 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) | |
752 |
|
752 | |||
753 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, |
|
753 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, | |
754 | expiration_time=cache_ttl, |
|
754 | expiration_time=cache_ttl, | |
755 | condition=plugin_cache_active) |
|
755 | condition=plugin_cache_active) | |
756 | def compute_auth( |
|
756 | def compute_auth( | |
757 | cache_name, plugin_name, username, password): |
|
757 | cache_name, plugin_name, username, password): | |
758 |
|
758 | |||
759 | # _authenticate is a wrapper for .auth() method of plugin. |
|
759 | # _authenticate is a wrapper for .auth() method of plugin. | |
760 | # it checks if .auth() sends proper data. |
|
760 | # it checks if .auth() sends proper data. | |
761 | # For RhodeCodeExternalAuthPlugin it also maps users to |
|
761 | # For RhodeCodeExternalAuthPlugin it also maps users to | |
762 | # Database and maps the attributes returned from .auth() |
|
762 | # Database and maps the attributes returned from .auth() | |
763 | # to RhodeCode database. If this function returns data |
|
763 | # to RhodeCode database. If this function returns data | |
764 | # then auth is correct. |
|
764 | # then auth is correct. | |
765 | log.debug('Running plugin `%s` _authenticate method ' |
|
765 | log.debug('Running plugin `%s` _authenticate method ' | |
766 | 'using username and password', plugin.get_id()) |
|
766 | 'using username and password', plugin.get_id()) | |
767 | return plugin._authenticate( |
|
767 | return plugin._authenticate( | |
768 | user, username, password, plugin_settings, |
|
768 | user, username, password, plugin_settings, | |
769 | environ=environ or {}) |
|
769 | environ=environ or {}) | |
770 |
|
770 | |||
771 | start = time.time() |
|
771 | start = time.time() | |
772 | # for environ based auth, password can be empty, but then the validation is |
|
772 | # for environ based auth, password can be empty, but then the validation is | |
773 | # on the server that fills in the env data needed for authentication |
|
773 | # on the server that fills in the env data needed for authentication | |
774 | plugin_user = compute_auth('auth', plugin.name, username, (password or '')) |
|
774 | plugin_user = compute_auth('auth', plugin.name, username, (password or '')) | |
775 |
|
775 | |||
776 | auth_time = time.time() - start |
|
776 | auth_time = time.time() - start | |
777 | log.debug('Authentication for plugin `%s` completed in %.4fs, ' |
|
777 | log.debug('Authentication for plugin `%s` completed in %.4fs, ' | |
778 | 'expiration time of fetched cache %.1fs.', |
|
778 | 'expiration time of fetched cache %.1fs.', | |
779 | plugin.get_id(), auth_time, cache_ttl) |
|
779 | plugin.get_id(), auth_time, cache_ttl) | |
780 |
|
780 | |||
781 | log.debug('PLUGIN USER DATA: %s', plugin_user) |
|
781 | log.debug('PLUGIN USER DATA: %s', plugin_user) | |
782 |
|
782 | |||
|
783 | statsd = StatsdClient.statsd | |||
|
784 | ||||
783 | if plugin_user: |
|
785 | if plugin_user: | |
784 | log.debug('Plugin returned proper authentication data') |
|
786 | log.debug('Plugin returned proper authentication data') | |
|
787 | if statsd: | |||
|
788 | statsd.incr('rhodecode_login_success_total') | |||
785 | return plugin_user |
|
789 | return plugin_user | |
|
790 | ||||
786 | # we failed to Auth because .auth() method didn't return proper user |
|
791 | # we failed to Auth because .auth() method didn't return proper user | |
787 | log.debug("User `%s` failed to authenticate against %s", |
|
792 | log.debug("User `%s` failed to authenticate against %s", | |
788 | display_user, plugin.get_id()) |
|
793 | display_user, plugin.get_id()) | |
|
794 | if statsd: | |||
|
795 | statsd.incr('rhodecode_login_fail_total') | |||
789 |
|
796 | |||
790 | # case when we failed to authenticate against all defined plugins |
|
797 | # case when we failed to authenticate against all defined plugins | |
791 | return None |
|
798 | return None | |
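Reviewer note: an end-to-end usage sketch of authenticate() for a VCS request; the variables below (username, password, environ, request, repo name) are assumed to come from the calling middleware.

    plugin_user = authenticate(
        username, password, environ=environ, auth_type=VCS_TYPE,
        registry=request.registry, acl_repo_name='group/repo')
    if plugin_user is None:
        # every enabled and active plugin rejected the credentials; deny access
        pass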
792 |
|
799 | |||
793 |
|
800 | |||
794 | def chop_at(s, sub, inclusive=False): |
|
801 | def chop_at(s, sub, inclusive=False): | |
795 | """Truncate string ``s`` at the first occurrence of ``sub``. |
|
802 | """Truncate string ``s`` at the first occurrence of ``sub``. | |
796 |
|
803 | |||
797 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
804 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. | |
798 |
|
805 | |||
799 | >>> chop_at("plutocratic brats", "rat") |
|
806 | >>> chop_at("plutocratic brats", "rat") | |
800 | 'plutoc' |
|
807 | 'plutoc' | |
801 | >>> chop_at("plutocratic brats", "rat", True) |
|
808 | >>> chop_at("plutocratic brats", "rat", True) | |
802 | 'plutocrat' |
|
809 | 'plutocrat' | |
803 | """ |
|
810 | """ | |
804 | pos = s.find(sub) |
|
811 | pos = s.find(sub) | |
805 | if pos == -1: |
|
812 | if pos == -1: | |
806 | return s |
|
813 | return s | |
807 | if inclusive: |
|
814 | if inclusive: | |
808 | return s[:pos+len(sub)] |
|
815 | return s[:pos+len(sub)] | |
809 | return s[:pos] |
|
816 | return s[:pos] |
@@ -1,796 +1,796 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import sys
import logging
import collections
import tempfile
import time

from paste.gzipper import make_gzip_middleware
import pyramid.events
from pyramid.wsgi import wsgiapp
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.config import Configurator
from pyramid.settings import asbool, aslist
from pyramid.httpexceptions import (
    HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
from pyramid.renderers import render_to_response

from rhodecode.model import meta
from rhodecode.config import patches
from rhodecode.config import utils as config_utils
from rhodecode.config.environment import load_pyramid_environment

import rhodecode.events
from rhodecode.lib.middleware.vcs import VCSMiddleware
from rhodecode.lib.request import Request
from rhodecode.lib.vcs import VCSCommunicationError
from rhodecode.lib.exceptions import VCSServerUnavailable
from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
from rhodecode.lib.middleware.https_fixup import HttpsFixup
from rhodecode.lib.plugins.utils import register_rhodecode_plugin
from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
from rhodecode.lib.exc_tracking import store_exception
from rhodecode.subscribers import (
    scan_repositories_if_enabled, write_js_routes_if_enabled,
    write_metadata_if_needed, write_usage_data)
from rhodecode.lib.statsd_client import StatsdClient

log = logging.getLogger(__name__)


def is_http_error(response):
    # an error which should have a traceback
    return response.status_code > 499


def should_load_all():
    """
    Returns whether all application components should be loaded. In some cases
    it is desirable to skip loading the apps for faster shell script execution.
    """
    ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
    if ssh_cmd:
        return False

    return True
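
# Note on should_load_all(): the RC_CMD_SSH_WRAPPER variable is expected to be
# exported by the SSH wrapper entry point before this module is used, so that
# such shell invocations skip the full (and slower) application setup below.
# This describes the assumed calling convention; nothing in this module sets it.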


def make_pyramid_app(global_config, **settings):
    """
    Constructs the WSGI application based on Pyramid.

    Specials:

    * The application can also be integrated like a plugin via the call to
      `includeme`. This is accompanied by the other utility functions which
      are called. Changing this should be done with great care to not break
      cases when these fragments are assembled from another place.

    """

    # Allows the use of format-style "{ENV_NAME}" placeholders in the
    # configuration; each is replaced by the value of the environment
    # variable "NAME".
    start_time = time.time()
    log.info('Pyramid app config starting')

    # init and bootstrap StatsdClient
    StatsdClient.setup(settings)

    debug = asbool(global_config.get('debug'))
    if debug:
        enable_debug()

    environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}

    global_config = _substitute_values(global_config, environ)
    settings = _substitute_values(settings, environ)

    sanitize_settings_and_apply_defaults(global_config, settings)

    config = Configurator(settings=settings)
    # Init our statsd at very start
    config.registry.statsd = StatsdClient.statsd

    # Apply compatibility patches
    patches.inspect_getargspec()

    load_pyramid_environment(global_config, settings)

    # Static file view comes first
    includeme_first(config)

    includeme(config)

    pyramid_app = config.make_wsgi_app()
    pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
    pyramid_app.config = config

    config.configure_celery(global_config['__file__'])

    # creating the app uses a connection - return it after we are done
    meta.Session.remove()
    statsd = StatsdClient.statsd

    total_time = time.time() - start_time
    log.info('Pyramid app `%s` created and configured in %.2fs',
             pyramid_app.func_name, total_time)
    if statsd:
        elapsed_time_ms = 1000.0 * total_time
        statsd.timing('rhodecode_app_bootstrap_timing', elapsed_time_ms, tags=[
            "pyramid_app:{}".format(pyramid_app.func_name)
        ])
    return pyramid_app
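
# Note on make_pyramid_app(): the (global_config, **settings) signature is the
# usual PasteDeploy app-factory contract, so a deployment .ini would typically
# reference this package in its [app:main] section, roughly (sketch only, the
# egg name may differ per install):
#
#     [app:main]
#     use = egg:rhodecode-enterprise-ce
#
# The factory itself only reads global_config (e.g. 'debug' and '__file__')
# plus the flat settings dict handed to it.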


def not_found_view(request):
    """
    The view which should be registered as the not-found view in Pyramid.
    """

    if not getattr(request, 'vcs_call', None):
        # handle like the regular case with our error_handler
        return error_handler(HTTPNotFound(), request)

    # handle the not found view as a vcs call
    settings = request.registry.settings
    ae_client = getattr(request, 'ae_client', None)
    vcs_app = VCSMiddleware(
        HTTPNotFound(), request.registry, settings,
        appenlight_client=ae_client)

    return wsgiapp(vcs_app)(None, request)


def error_handler(exception, request):
    import rhodecode
    from rhodecode.lib import helpers
    from rhodecode.lib.utils2 import str2bool

    rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'

    base_response = HTTPInternalServerError()
    # prefer the original exception for the response since it may have headers set
    if isinstance(exception, HTTPException):
        base_response = exception
    elif isinstance(exception, VCSCommunicationError):
        base_response = VCSServerUnavailable()

    if is_http_error(base_response):
        log.exception(
            'error occurred handling this request for path: %s', request.path)

    statsd = request.registry.statsd
    if statsd and base_response.status_code > 499:
        statsd.incr('rhodecode_exception_total', tags=["code:{}".format(base_response.status_code)])

    error_explanation = base_response.explanation or str(base_response)
    if base_response.status_code == 404:
        error_explanation += " Possibly you don't have permission to access this page."
    c = AttributeDict()
    c.error_message = base_response.status
    c.error_explanation = error_explanation
    c.visual = AttributeDict()

    c.visual.rhodecode_support_url = (
        request.registry.settings.get('rhodecode_support_url') or
        request.route_url('rhodecode_support')
    )
    c.redirect_time = 0
    c.rhodecode_name = rhodecode_title
    if not c.rhodecode_name:
        c.rhodecode_name = 'Rhodecode'

    c.causes = []
    if is_http_error(base_response):
        c.causes.append('Server is overloaded.')
        c.causes.append('Server database connection is lost.')
        c.causes.append('Server experienced an unhandled error.')

    if hasattr(base_response, 'causes'):
        c.causes = base_response.causes

    c.messages = helpers.flash.pop_messages(request=request)

    exc_info = sys.exc_info()
    c.exception_id = id(exc_info)
    c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
        or base_response.status_code > 499
    c.exception_id_url = request.route_url(
        'admin_settings_exception_tracker_show', exception_id=c.exception_id)

    if c.show_exception_id:
        store_exception(c.exception_id, exc_info)
        c.exception_debug = str2bool(rhodecode.CONFIG.get('debug'))
        c.exception_config_ini = rhodecode.CONFIG.get('__file__')

    response = render_to_response(
        '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
        response=base_response)

    return response
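
# Note on error_handler(): it is registered at the bottom of includeme() via
# config.add_view(error_handler, context=Exception) when the debug toolbar is
# disabled, and via config.add_view(error_handler, context=HTTPError) for HTTP
# errors, so both unhandled exceptions and 4xx/5xx responses end up rendered
# through the error_document.mako template used above.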


def includeme_first(config):
    # redirect automatic browser favicon.ico requests to the correct place
    def favicon_redirect(context, request):
        return HTTPFound(
            request.static_path('rhodecode:public/images/favicon.ico'))

    config.add_view(favicon_redirect, route_name='favicon')
    config.add_route('favicon', '/favicon.ico')

    def robots_redirect(context, request):
        return HTTPFound(
            request.static_path('rhodecode:public/robots.txt'))

    config.add_view(robots_redirect, route_name='robots')
    config.add_route('robots', '/robots.txt')

    config.add_static_view(
        '_static/deform', 'deform:static')
    config.add_static_view(
        '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)


def includeme(config, auth_resources=None):
    from rhodecode.lib.celerylib.loader import configure_celery
    log.debug('Initializing main includeme from %s', os.path.basename(__file__))
    settings = config.registry.settings
    config.set_request_factory(Request)

    # plugin information
    config.registry.rhodecode_plugins = collections.OrderedDict()

    config.add_directive(
        'register_rhodecode_plugin', register_rhodecode_plugin)

    config.add_directive('configure_celery', configure_celery)

    if asbool(settings.get('appenlight', 'false')):
        config.include('appenlight_client.ext.pyramid_tween')

    load_all = should_load_all()

    # Includes which are required. The application would fail without them.
    config.include('pyramid_mako')
    config.include('rhodecode.lib.rc_beaker')
    config.include('rhodecode.lib.rc_cache')
    config.include('rhodecode.apps._base.navigation')
    config.include('rhodecode.apps._base.subscribers')
    config.include('rhodecode.tweens')
    config.include('rhodecode.authentication')

    if load_all:
        ce_auth_resources = [
            'rhodecode.authentication.plugins.auth_crowd',
            'rhodecode.authentication.plugins.auth_headers',
            'rhodecode.authentication.plugins.auth_jasig_cas',
            'rhodecode.authentication.plugins.auth_ldap',
            'rhodecode.authentication.plugins.auth_pam',
            'rhodecode.authentication.plugins.auth_rhodecode',
            'rhodecode.authentication.plugins.auth_token',
        ]

        # load CE authentication plugins

        if auth_resources:
            ce_auth_resources.extend(auth_resources)

        for resource in ce_auth_resources:
            config.include(resource)

    # Auto discover authentication plugins and include their configuration.
    if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
        from rhodecode.authentication import discover_legacy_plugins
        discover_legacy_plugins(config)

    # apps
    if load_all:
        config.include('rhodecode.api')
        config.include('rhodecode.apps._base')
        config.include('rhodecode.apps.hovercards')
        config.include('rhodecode.apps.ops')
        config.include('rhodecode.apps.channelstream')
        config.include('rhodecode.apps.file_store')
        config.include('rhodecode.apps.admin')
        config.include('rhodecode.apps.login')
        config.include('rhodecode.apps.home')
        config.include('rhodecode.apps.journal')

        config.include('rhodecode.apps.repository')
        config.include('rhodecode.apps.repo_group')
        config.include('rhodecode.apps.user_group')
        config.include('rhodecode.apps.search')
        config.include('rhodecode.apps.user_profile')
        config.include('rhodecode.apps.user_group_profile')
        config.include('rhodecode.apps.my_account')
        config.include('rhodecode.apps.gist')

        config.include('rhodecode.apps.svn_support')
        config.include('rhodecode.apps.ssh_support')
        config.include('rhodecode.apps.debug_style')

    if load_all:
        config.include('rhodecode.integrations')

    config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
    config.add_translation_dirs('rhodecode:i18n/')
    settings['default_locale_name'] = settings.get('lang', 'en')

    # Add subscribers.
    if load_all:
        config.add_subscriber(scan_repositories_if_enabled,
                              pyramid.events.ApplicationCreated)
        config.add_subscriber(write_metadata_if_needed,
                              pyramid.events.ApplicationCreated)
        config.add_subscriber(write_usage_data,
                              pyramid.events.ApplicationCreated)
        config.add_subscriber(write_js_routes_if_enabled,
                              pyramid.events.ApplicationCreated)

    # request custom methods
    config.add_request_method(
        'rhodecode.lib.partial_renderer.get_partial_renderer',
        'get_partial_renderer')

    config.add_request_method(
        'rhodecode.lib.request_counter.get_request_counter',
        'request_count')

    # Set the authorization policy.
    authz_policy = ACLAuthorizationPolicy()
    config.set_authorization_policy(authz_policy)

    # Set the default renderer for HTML templates to mako.
    config.add_mako_renderer('.html')

    config.add_renderer(
        name='json_ext',
        factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')

    config.add_renderer(
        name='string_html',
        factory='rhodecode.lib.string_renderer.html')

    # include RhodeCode plugins
    includes = aslist(settings.get('rhodecode.includes', []))
    for inc in includes:
        config.include(inc)

    # Custom not-found view: if our pyramid app doesn't know how to handle
    # the request, pass it to the potential VCS handling app.
    config.add_notfound_view(not_found_view)
    if not settings.get('debugtoolbar.enabled', False):
        # if the debugtoolbar is disabled, handle all exceptions via the error_handler
        config.add_view(error_handler, context=Exception)

    # all errors including 403/404/50X
    config.add_view(error_handler, context=HTTPError)
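
# Example for includeme(config, auth_resources=...): an edition or extension
# shipping extra authentication plugins can pass their dotted module paths,
# e.g. (hypothetical name):
#
#     includeme(config, auth_resources=['my_extension.auth_plugins.auth_sso'])
#
# Each entry is appended to ce_auth_resources and handed to config.include(),
# so the referenced module must provide a Pyramid includeme() of its own.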


def wrap_app_in_wsgi_middlewares(pyramid_app, config):
    """
    Apply outer WSGI middlewares around the application.
    """
    registry = config.registry
    settings = registry.settings

    # enable https redirects based on HTTP_X_URL_SCHEME set by the proxy
    pyramid_app = HttpsFixup(pyramid_app, settings)

    pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
        pyramid_app, settings)
    registry.ae_client = _ae_client

    if settings['gzip_responses']:
        pyramid_app = make_gzip_middleware(
            pyramid_app, settings, compress_level=1)

    # this should be the outermost middleware in the wsgi stack since
    # middleware like Routes makes database calls
    def pyramid_app_with_cleanup(environ, start_response):
        try:
            return pyramid_app(environ, start_response)
        finally:
            # Dispose of the current database session and roll back uncommitted
            # transactions.
            meta.Session.remove()

            # In a single-threaded server, on a non-sqlite db, we should have
            # '0 Current Checked out connections' at the end of a request;
            # if not, then something, somewhere is leaving a connection open
            pool = meta.Base.metadata.bind.engine.pool
            log.debug('sa pool status: %s', pool.status())
            log.debug('Request processing finalized')

    return pyramid_app_with_cleanup
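
# Note on wrap_app_in_wsgi_middlewares(): the resulting WSGI stack, from outer
# to inner, is: session cleanup -> gzip (optional) -> appenlight (optional) ->
# HttpsFixup -> the Pyramid app, i.e. the database session is released only
# after every other layer has finished with the response.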


def sanitize_settings_and_apply_defaults(global_config, settings):
    """
    Applies settings defaults and does all type conversion.

    The goal is to move all settings parsing and preparation into this place,
    so that we have only one place left which deals with this part. The
    remaining parts of the application can then rely fully on well prepared
    settings.

    This piece will later be split up per topic to avoid one big monster
    function.
    """

    settings.setdefault('rhodecode.edition', 'Community Edition')
    settings.setdefault('rhodecode.edition_id', 'CE')

    if 'mako.default_filters' not in settings:
        # set custom default filters if they are not defined
        settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
        settings['mako.default_filters'] = 'h_filter'

    if 'mako.directories' not in settings:
        mako_directories = settings.setdefault('mako.directories', [
            # Base templates of the original application
            'rhodecode:templates',
        ])
        log.debug(
            "Using the following Mako template directories: %s",
            mako_directories)

    # NOTE(marcink): fix the redis requirement for the connection schema since 3.X
    if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
        raw_url = settings['beaker.session.url']
        if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
            settings['beaker.session.url'] = 'redis://' + raw_url

    # Default includes, possible for a user to change
    pyramid_includes = settings.setdefault('pyramid.includes', [])
    log.debug(
        "Using the following pyramid.includes: %s",
        pyramid_includes)

    # TODO: johbo: Re-think this, usually the call to config.include
    # should allow to pass in a prefix.
    settings.setdefault('rhodecode.api.url', '/_admin/api')
    settings.setdefault('__file__', global_config.get('__file__'))

    # Sanitize generic settings.
    _list_setting(settings, 'default_encoding', 'UTF-8')
    _bool_setting(settings, 'is_test', 'false')
    _bool_setting(settings, 'gzip_responses', 'false')

    # Call split-out functions that sanitize settings for each topic.
    _sanitize_appenlight_settings(settings)
    _sanitize_vcs_settings(settings)
    _sanitize_cache_settings(settings)

    # configure the instance id
    config_utils.set_instance_id(settings)

    return settings
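
# Illustrative sketch of the settings pipeline (the names below are made up):
# with an .ini entry such as `vcs.server = {ENV_VCSSERVER_ADDR}` and
# VCSSERVER_ADDR exported in the environment, _substitute_values() resolves the
# placeholder first, and only afterwards do the _*_setting() helpers called
# from sanitize_settings_and_apply_defaults() coerce types and fill defaults.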


def enable_debug():
    """
    Helper to enable debug mode on a running instance.
    """
    import tempfile
    import textwrap
    import logging.config

    ini_template = textwrap.dedent("""
    #####################################
    ### DEBUG LOGGING CONFIGURATION ####
    #####################################
    [loggers]
    keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper

    [handlers]
    keys = console, console_sql

    [formatters]
    keys = generic, color_formatter, color_formatter_sql

    #############
    ## LOGGERS ##
    #############
    [logger_root]
    level = NOTSET
    handlers = console

    [logger_sqlalchemy]
    level = INFO
    handlers = console_sql
    qualname = sqlalchemy.engine
    propagate = 0

    [logger_beaker]
    level = DEBUG
    handlers =
    qualname = beaker.container
    propagate = 1

    [logger_rhodecode]
    level = DEBUG
    handlers =
    qualname = rhodecode
    propagate = 1

    [logger_ssh_wrapper]
    level = DEBUG
    handlers =
    qualname = ssh_wrapper
    propagate = 1

    [logger_celery]
    level = DEBUG
    handlers =
    qualname = celery


    ##############
    ## HANDLERS ##
    ##############

    [handler_console]
    class = StreamHandler
    args = (sys.stderr, )
    level = DEBUG
    formatter = color_formatter

    [handler_console_sql]
    # "level = DEBUG" logs SQL queries and results.
    # "level = INFO" logs SQL queries.
    # "level = WARN" logs neither. (Recommended for production systems.)
    class = StreamHandler
    args = (sys.stderr, )
    level = WARN
    formatter = color_formatter_sql

    ################
    ## FORMATTERS ##
    ################

    [formatter_generic]
    class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
    format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s
    datefmt = %Y-%m-%d %H:%M:%S

    [formatter_color_formatter]
    class = rhodecode.lib.logging_formatter.ColorRequestTrackingFormatter
    format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s
    datefmt = %Y-%m-%d %H:%M:%S

    [formatter_color_formatter_sql]
    class = rhodecode.lib.logging_formatter.ColorFormatterSql
    format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
    datefmt = %Y-%m-%d %H:%M:%S
    """)

    with tempfile.NamedTemporaryFile(prefix='rc_debug_logging_', suffix='.ini',
                                     delete=False) as f:
        log.info('Saved Temporary DEBUG config at %s', f.name)
        f.write(ini_template)

    logging.config.fileConfig(f.name)
    log.debug('DEBUG MODE ON')
    os.remove(f.name)
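
# Note on enable_debug(): it is only invoked from make_pyramid_app() when the
# global config carries `debug = true`; the generated logging .ini is written
# to a temporary file, fed to logging.config.fileConfig() and removed again.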


def _sanitize_appenlight_settings(settings):
    _bool_setting(settings, 'appenlight', 'false')


def _sanitize_vcs_settings(settings):
    """
    Applies settings defaults and does type conversion for all VCS related
    settings.
    """
    _string_setting(settings, 'vcs.svn.compatible_version', '')
    _string_setting(settings, 'vcs.hooks.protocol', 'http')
    _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
    _string_setting(settings, 'vcs.scm_app_implementation', 'http')
    _string_setting(settings, 'vcs.server', '')
    _string_setting(settings, 'vcs.server.protocol', 'http')
    _bool_setting(settings, 'startup.import_repos', 'false')
    _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
    _bool_setting(settings, 'vcs.server.enable', 'true')
    _bool_setting(settings, 'vcs.start_server', 'false')
    _list_setting(settings, 'vcs.backends', 'hg, git, svn')
    _int_setting(settings, 'vcs.connection_timeout', 3600)

    # Support legacy values of vcs.scm_app_implementation. Legacy configurations
    # may use 'rhodecode.lib.middleware.utils.scm_app_http' or 'vcsserver.scm_app'
    # (disabled since 4.13); both are now mapped to 'http'.
    scm_app_impl = settings['vcs.scm_app_implementation']
    if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
        settings['vcs.scm_app_implementation'] = 'http'
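
# After _sanitize_vcs_settings() has run, the values are fully typed, e.g. the
# default 'hg, git, svn' for vcs.backends becomes the list ['hg', 'git', 'svn']
# and vcs.connection_timeout becomes the integer 3600.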


def _sanitize_cache_settings(settings):
    temp_store = tempfile.gettempdir()
    default_cache_dir = os.path.join(temp_store, 'rc_cache')

    # save the default cache dir and use it for all backends later.
    default_cache_dir = _string_setting(
        settings,
        'cache_dir',
        default_cache_dir, lower=False, default_when_empty=True)

    # ensure we have our dir created
    if not os.path.isdir(default_cache_dir):
        os.makedirs(default_cache_dir, mode=0o755)

    # exception store cache
    _string_setting(
        settings,
        'exception_tracker.store_path',
        temp_store, lower=False, default_when_empty=True)
    _bool_setting(
        settings,
        'exception_tracker.send_email',
        'false')
    _string_setting(
        settings,
        'exception_tracker.email_prefix',
        '[RHODECODE ERROR]', lower=False, default_when_empty=True)

    # cache_perms
    _string_setting(
        settings,
        'rc_cache.cache_perms.backend',
        'dogpile.cache.rc.file_namespace', lower=False)
    _int_setting(
        settings,
        'rc_cache.cache_perms.expiration_time',
        60)
    _string_setting(
        settings,
        'rc_cache.cache_perms.arguments.filename',
        os.path.join(default_cache_dir, 'rc_cache_1'), lower=False)

    # cache_repo
    _string_setting(
        settings,
        'rc_cache.cache_repo.backend',
        'dogpile.cache.rc.file_namespace', lower=False)
    _int_setting(
        settings,
        'rc_cache.cache_repo.expiration_time',
        60)
    _string_setting(
        settings,
        'rc_cache.cache_repo.arguments.filename',
        os.path.join(default_cache_dir, 'rc_cache_2'), lower=False)

    # cache_license
    _string_setting(
        settings,
        'rc_cache.cache_license.backend',
        'dogpile.cache.rc.file_namespace', lower=False)
    _int_setting(
        settings,
        'rc_cache.cache_license.expiration_time',
        5*60)
    _string_setting(
        settings,
        'rc_cache.cache_license.arguments.filename',
        os.path.join(default_cache_dir, 'rc_cache_3'), lower=False)

    # cache_repo_longterm: memory, 96h
    _string_setting(
        settings,
        'rc_cache.cache_repo_longterm.backend',
        'dogpile.cache.rc.memory_lru', lower=False)
    _int_setting(
        settings,
        'rc_cache.cache_repo_longterm.expiration_time',
        345600)
    _int_setting(
        settings,
        'rc_cache.cache_repo_longterm.max_size',
        10000)

    # sql_cache_short
    _string_setting(
        settings,
        'rc_cache.sql_cache_short.backend',
        'dogpile.cache.rc.memory_lru', lower=False)
    _int_setting(
        settings,
        'rc_cache.sql_cache_short.expiration_time',
        30)
    _int_setting(
        settings,
        'rc_cache.sql_cache_short.max_size',
        10000)
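
# All of the rc_cache.* values above are defaults only: anything set explicitly
# in the .ini wins, because the helpers below first read the existing value via
# settings.get(name, default) and only then coerce it.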


def _int_setting(settings, name, default):
    settings[name] = int(settings.get(name, default))
    return settings[name]


def _bool_setting(settings, name, default):
    input_val = settings.get(name, default)
    if isinstance(input_val, unicode):
        input_val = input_val.encode('utf8')
    settings[name] = asbool(input_val)
    return settings[name]


def _list_setting(settings, name, default):
    raw_value = settings.get(name, default)

    old_separator = ','
    if old_separator in raw_value:
        # If we get a comma separated list, pass it to our own function.
        settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
    else:
        # Otherwise we assume it uses Pyramid's space/newline separation.
        settings[name] = aslist(raw_value)
    return settings[name]
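
# e.g. _list_setting(settings, 'vcs.backends', 'hg, git, svn') stores
# ['hg', 'git', 'svn'], while a space/newline separated value coming from the
# .ini is handled by Pyramid's aslist() instead.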
753 |
|
753 | |||
754 |
|
754 | |||
755 | def _string_setting(settings, name, default, lower=True, default_when_empty=False): |
|
755 | def _string_setting(settings, name, default, lower=True, default_when_empty=False): | |
756 | value = settings.get(name, default) |
|
756 | value = settings.get(name, default) | |
757 |
|
757 | |||
758 | if default_when_empty and not value: |
|
758 | if default_when_empty and not value: | |
759 | # use default value when value is empty |
|
759 | # use default value when value is empty | |
760 | value = default |
|
760 | value = default | |
761 |
|
761 | |||
762 | if lower: |
|
762 | if lower: | |
763 | value = value.lower() |
|
763 | value = value.lower() | |
764 | settings[name] = value |
|
764 | settings[name] = value | |
765 | return settings[name] |
|
765 | return settings[name] | |
766 |
|
766 | |||
767 |
|
767 | |||
768 | def _substitute_values(mapping, substitutions): |
|
768 | def _substitute_values(mapping, substitutions): | |
769 | result = {} |
|
769 | result = {} | |
770 |
|
770 | |||
771 | try: |
|
771 | try: | |
772 | for key, value in mapping.items(): |
|
772 | for key, value in mapping.items(): | |
773 | # initialize without substitution first |
|
773 | # initialize without substitution first | |
774 | result[key] = value |
|
774 | result[key] = value | |
775 |
|
775 | |||
776 | # Note: Cannot use regular replacements, since they would clash |
|
776 | # Note: Cannot use regular replacements, since they would clash | |
777 | # with the implementation of ConfigParser. Using "format" instead. |
|
777 | # with the implementation of ConfigParser. Using "format" instead. | |
778 | try: |
|
778 | try: | |
779 | result[key] = value.format(**substitutions) |
|
779 | result[key] = value.format(**substitutions) | |
780 | except KeyError as e: |
|
780 | except KeyError as e: | |
781 | env_var = '{}'.format(e.args[0]) |
|
781 | env_var = '{}'.format(e.args[0]) | |
782 |
|
782 | |||
783 | msg = 'Failed to substitute: `{key}={{{var}}}` with environment entry. ' \ |
|
783 | msg = 'Failed to substitute: `{key}={{{var}}}` with environment entry. ' \ | |
784 | 'Make sure your environment has {var} set, or remove this ' \ |
|
784 | 'Make sure your environment has {var} set, or remove this ' \ | |
785 | 'variable from config file'.format(key=key, var=env_var) |
|
785 | 'variable from config file'.format(key=key, var=env_var) | |
786 |
|
786 | |||
787 | if env_var.startswith('ENV_'): |
|
787 | if env_var.startswith('ENV_'): | |
788 | raise ValueError(msg) |
|
788 | raise ValueError(msg) | |
789 | else: |
|
789 | else: | |
790 | log.warning(msg) |
|
790 | log.warning(msg) | |
791 |
|
791 | |||
792 | except ValueError as e: |
|
792 | except ValueError as e: | |
793 | log.warning('Failed to substitute ENV variable: %s', e) |
|
793 | log.warning('Failed to substitute ENV variable: %s', e) | |
794 | result = mapping |
|
794 | result = mapping | |
795 |
|
795 | |||
796 | return result |
|
796 | return result |
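
A usage sketch for `_substitute_values` above (not part of the changeset; the keys and values are invented). The helper copies the mapping, formats every value against the given substitutions, keeps the original value with a warning when an ordinary placeholder is missing, and aborts the whole substitution (returning the mapping unchanged) when the missing placeholder carries the `ENV_` prefix:

    # illustrative mapping; names are assumptions, not RhodeCode defaults
    settings = {
        'database_url': '{ENV_DB_URL}',   # resolved from substitutions
        'search_index': '{UNSET_KEY}',    # no ENV_ prefix: warning, value kept as-is
    }
    substitutions = {'ENV_DB_URL': 'postgresql://localhost/rhodecode'}

    result = _substitute_values(settings, substitutions)
    # result['database_url'] == 'postgresql://localhost/rhodecode'
    # result['search_index'] == '{UNSET_KEY}'
    #
    # a missing {ENV_*} placeholder raises ValueError internally; the outer handler
    # catches it, logs a warning, and returns the original mapping untouched
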
@@ -1,81 +1,81 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import socket |
|
21 | import socket | |
22 | import logging |
|
22 | import logging | |
23 |
|
23 | |||
24 | import rhodecode |
|
24 | import rhodecode | |
25 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
25 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
26 | from rhodecode.lib.celerylib.loader import ( |
|
26 | from rhodecode.lib.celerylib.loader import ( | |
27 | celery_app, RequestContextTask, get_logger) |
|
27 | celery_app, RequestContextTask, get_logger) | |
28 | from rhodecode.lib.statsd_client import StatsdClient |
|
28 | from rhodecode.lib.statsd_client import StatsdClient | |
29 |
|
29 | |||
30 | async_task = celery_app.task |
|
30 | async_task = celery_app.task | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | log = logging.getLogger(__name__) |
|
33 | log = logging.getLogger(__name__) | |
34 |
|
34 | |||
35 |
|
35 | |||
36 | class ResultWrapper(object): |
|
36 | class ResultWrapper(object): | |
37 | def __init__(self, task): |
|
37 | def __init__(self, task): | |
38 | self.task = task |
|
38 | self.task = task | |
39 |
|
39 | |||
40 | @LazyProperty |
|
40 | @LazyProperty | |
41 | def result(self): |
|
41 | def result(self): | |
42 | return self.task |
|
42 | return self.task | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | def run_task(task, *args, **kwargs): |
|
45 | def run_task(task, *args, **kwargs): | |
46 | log.debug('Got task `%s` for execution, celery mode enabled:%s', task, rhodecode.CELERY_ENABLED) |
|
46 | log.debug('Got task `%s` for execution, celery mode enabled:%s', task, rhodecode.CELERY_ENABLED) | |
47 | if task is None: |
|
47 | if task is None: | |
48 | raise ValueError('Got non-existing task for execution') |
|
48 | raise ValueError('Got non-existing task for execution') | |
49 |
|
49 | |||
50 | statsd = StatsdClient.statsd |
|
50 | statsd = StatsdClient.statsd | |
51 | exec_mode = 'sync' |
|
51 | exec_mode = 'sync' | |
52 |
|
52 | |||
53 | if rhodecode.CELERY_ENABLED: |
|
53 | if rhodecode.CELERY_ENABLED: | |
54 |
|
54 | |||
55 | try: |
|
55 | try: | |
56 | t = task.apply_async(args=args, kwargs=kwargs) |
|
56 | t = task.apply_async(args=args, kwargs=kwargs) | |
57 | log.debug('executing task %s:%s in async mode', t.task_id, task) |
|
57 | log.debug('executing task %s:%s in async mode', t.task_id, task) | |
58 | exec_mode = 'async' |
|
58 | exec_mode = 'async' | |
59 | return t |
|
59 | return t | |
60 |
|
60 | |||
61 | except socket.error as e: |
|
61 | except socket.error as e: | |
62 | if isinstance(e, IOError) and e.errno == 111: |
|
62 | if isinstance(e, IOError) and e.errno == 111: | |
63 | log.error('Unable to connect to celeryd `%s`. Sync execution', e) |
|
63 | log.error('Unable to connect to celeryd `%s`. Sync execution', e) | |
64 | else: |
|
64 | else: | |
65 | log.exception("Exception while connecting to celeryd.") |
|
65 | log.exception("Exception while connecting to celeryd.") | |
66 | except KeyError as e: |
|
66 | except KeyError as e: | |
67 | log.error('Unable to connect to celeryd `%s`. Sync execution', e) |
|
67 | log.error('Unable to connect to celeryd `%s`. Sync execution', e) | |
68 | except Exception as e: |
|
68 | except Exception as e: | |
69 | log.exception( |
|
69 | log.exception( | |
70 | "Exception while trying to run task asynchronous. " |
|
70 | "Exception while trying to run task asynchronous. " | |
71 | "Fallback to sync execution.") |
|
71 | "Fallback to sync execution.") | |
72 |
|
72 | |||
73 | else: |
|
73 | else: | |
74 | log.debug('executing task %s:%s in sync mode', 'TASK', task) |
|
74 | log.debug('executing task %s:%s in sync mode', 'TASK', task) | |
75 |
|
75 | |||
76 | if statsd: |
|
76 | if statsd: | |
77 | statsd.incr('rhodecode_celery_task', tags=[ |
|
77 | statsd.incr('rhodecode_celery_task_total', tags=[ | |
78 | 'task:{}'.format(task), |
|
78 | 'task:{}'.format(task), | |
79 | 'mode:{}'.format(exec_mode) |
|
79 | 'mode:{}'.format(exec_mode) | |
80 | ]) |
|
80 | ]) | |
81 | return ResultWrapper(task(*args, **kwargs)) |
|
81 | return ResultWrapper(task(*args, **kwargs)) |
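
A brief usage sketch of `run_task` above, assuming a setup without a celery worker (`rhodecode.CELERY_ENABLED` is False); with celery enabled the call returns the `AsyncResult` from `apply_async` instead, and the renamed `rhodecode_celery_task_total` counter is only incremented on the synchronous path:

    from rhodecode.lib.celerylib import run_task
    from rhodecode.lib.celerylib.tasks import beat_check

    # sync mode: beat_check executes inline and ResultWrapper exposes its return value
    result = run_task(beat_check, 'ping')
    print(result.result)  # the time.time() float returned by beat_check
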
@@ -1,310 +1,310 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | """ |
|
20 | """ | |
21 | Celery loader, run with:: |
|
21 | Celery loader, run with:: | |
22 |
|
22 | |||
23 | celery worker \ |
|
23 | celery worker \ | |
24 | --beat \ |
|
24 | --beat \ | |
25 | --app rhodecode.lib.celerylib.loader \ |
|
25 | --app rhodecode.lib.celerylib.loader \ | |
26 | --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \ |
|
26 | --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \ | |
27 | --loglevel DEBUG --ini=._dev/dev.ini |
|
27 | --loglevel DEBUG --ini=._dev/dev.ini | |
28 | """ |
|
28 | """ | |
29 | import os |
|
29 | import os | |
30 | import logging |
|
30 | import logging | |
31 | import importlib |
|
31 | import importlib | |
32 |
|
32 | |||
33 | from celery import Celery |
|
33 | from celery import Celery | |
34 | from celery import signals |
|
34 | from celery import signals | |
35 | from celery import Task |
|
35 | from celery import Task | |
36 | from celery import exceptions # pragma: no cover |
|
36 | from celery import exceptions # pragma: no cover | |
37 | from kombu.serialization import register |
|
37 | from kombu.serialization import register | |
38 | from pyramid.threadlocal import get_current_request |
|
38 | from pyramid.threadlocal import get_current_request | |
39 |
|
39 | |||
40 | import rhodecode |
|
40 | import rhodecode | |
41 |
|
41 | |||
42 | from rhodecode.lib.auth import AuthUser |
|
42 | from rhodecode.lib.auth import AuthUser | |
43 | from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars, ping_db |
|
43 | from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars, ping_db | |
44 | from rhodecode.lib.ext_json import json |
|
44 | from rhodecode.lib.ext_json import json | |
45 | from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request |
|
45 | from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request | |
46 | from rhodecode.lib.utils2 import str2bool |
|
46 | from rhodecode.lib.utils2 import str2bool | |
47 | from rhodecode.model import meta |
|
47 | from rhodecode.model import meta | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | register('json_ext', json.dumps, json.loads, |
|
50 | register('json_ext', json.dumps, json.loads, | |
51 | content_type='application/x-json-ext', |
|
51 | content_type='application/x-json-ext', | |
52 | content_encoding='utf-8') |
|
52 | content_encoding='utf-8') | |
53 |
|
53 | |||
54 | log = logging.getLogger('celery.rhodecode.loader') |
|
54 | log = logging.getLogger('celery.rhodecode.loader') | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | def add_preload_arguments(parser): |
|
57 | def add_preload_arguments(parser): | |
58 | parser.add_argument( |
|
58 | parser.add_argument( | |
59 | '--ini', default=None, |
|
59 | '--ini', default=None, | |
60 | help='Path to ini configuration file.' |
|
60 | help='Path to ini configuration file.' | |
61 | ) |
|
61 | ) | |
62 | parser.add_argument( |
|
62 | parser.add_argument( | |
63 | '--ini-var', default=None, |
|
63 | '--ini-var', default=None, | |
64 | help='Comma separated list of key=value to pass to ini.' |
|
64 | help='Comma separated list of key=value to pass to ini.' | |
65 | ) |
|
65 | ) | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | def get_logger(obj): |
|
68 | def get_logger(obj): | |
69 | custom_log = logging.getLogger( |
|
69 | custom_log = logging.getLogger( | |
70 | 'rhodecode.task.{}'.format(obj.__class__.__name__)) |
|
70 | 'rhodecode.task.{}'.format(obj.__class__.__name__)) | |
71 |
|
71 | |||
72 | if rhodecode.CELERY_ENABLED: |
|
72 | if rhodecode.CELERY_ENABLED: | |
73 | try: |
|
73 | try: | |
74 | custom_log = obj.get_logger() |
|
74 | custom_log = obj.get_logger() | |
75 | except Exception: |
|
75 | except Exception: | |
76 | pass |
|
76 | pass | |
77 |
|
77 | |||
78 | return custom_log |
|
78 | return custom_log | |
79 |
|
79 | |||
80 |
|
80 | |||
81 | imports = ['rhodecode.lib.celerylib.tasks'] |
|
81 | imports = ['rhodecode.lib.celerylib.tasks'] | |
82 |
|
82 | |||
83 | try: |
|
83 | try: | |
84 | # try if we have EE tasks available |
|
84 | # try if we have EE tasks available | |
85 | importlib.import_module('rc_ee') |
|
85 | importlib.import_module('rc_ee') | |
86 | imports.append('rc_ee.lib.celerylib.tasks') |
|
86 | imports.append('rc_ee.lib.celerylib.tasks') | |
87 | except ImportError: |
|
87 | except ImportError: | |
88 | pass |
|
88 | pass | |
89 |
|
89 | |||
90 |
|
90 | |||
91 | base_celery_config = { |
|
91 | base_celery_config = { | |
92 | 'result_backend': 'rpc://', |
|
92 | 'result_backend': 'rpc://', | |
93 | 'result_expires': 60 * 60 * 24, |
|
93 | 'result_expires': 60 * 60 * 24, | |
94 | 'result_persistent': True, |
|
94 | 'result_persistent': True, | |
95 | 'imports': imports, |
|
95 | 'imports': imports, | |
96 | 'worker_max_tasks_per_child': 100, |
|
96 | 'worker_max_tasks_per_child': 100, | |
97 | 'accept_content': ['json_ext'], |
|
97 | 'accept_content': ['json_ext'], | |
98 | 'task_serializer': 'json_ext', |
|
98 | 'task_serializer': 'json_ext', | |
99 | 'result_serializer': 'json_ext', |
|
99 | 'result_serializer': 'json_ext', | |
100 | 'worker_hijack_root_logger': False, |
|
100 | 'worker_hijack_root_logger': False, | |
101 | 'database_table_names': { |
|
101 | 'database_table_names': { | |
102 | 'task': 'beat_taskmeta', |
|
102 | 'task': 'beat_taskmeta', | |
103 | 'group': 'beat_groupmeta', |
|
103 | 'group': 'beat_groupmeta', | |
104 | } |
|
104 | } | |
105 | } |
|
105 | } | |
106 | # init main celery app |
|
106 | # init main celery app | |
107 | celery_app = Celery() |
|
107 | celery_app = Celery() | |
108 | celery_app.user_options['preload'].add(add_preload_arguments) |
|
108 | celery_app.user_options['preload'].add(add_preload_arguments) | |
109 | ini_file_glob = None |
|
109 | ini_file_glob = None | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | @signals.setup_logging.connect |
|
112 | @signals.setup_logging.connect | |
113 | def setup_logging_callback(**kwargs): |
|
113 | def setup_logging_callback(**kwargs): | |
114 | setup_logging(ini_file_glob) |
|
114 | setup_logging(ini_file_glob) | |
115 |
|
115 | |||
116 |
|
116 | |||
117 | @signals.user_preload_options.connect |
|
117 | @signals.user_preload_options.connect | |
118 | def on_preload_parsed(options, **kwargs): |
|
118 | def on_preload_parsed(options, **kwargs): | |
119 | ini_location = options['ini'] |
|
119 | ini_location = options['ini'] | |
120 | ini_vars = options['ini_var'] |
|
120 | ini_vars = options['ini_var'] | |
121 | celery_app.conf['INI_PYRAMID'] = options['ini'] |
|
121 | celery_app.conf['INI_PYRAMID'] = options['ini'] | |
122 |
|
122 | |||
123 | if ini_location is None: |
|
123 | if ini_location is None: | |
124 | print('You must provide the paste --ini argument') |
|
124 | print('You must provide the paste --ini argument') | |
125 | exit(-1) |
|
125 | exit(-1) | |
126 |
|
126 | |||
127 | options = None |
|
127 | options = None | |
128 | if ini_vars is not None: |
|
128 | if ini_vars is not None: | |
129 | options = parse_ini_vars(ini_vars) |
|
129 | options = parse_ini_vars(ini_vars) | |
130 |
|
130 | |||
131 | global ini_file_glob |
|
131 | global ini_file_glob | |
132 | ini_file_glob = ini_location |
|
132 | ini_file_glob = ini_location | |
133 |
|
133 | |||
134 | log.debug('Bootstrapping RhodeCode application...') |
|
134 | log.debug('Bootstrapping RhodeCode application...') | |
135 | env = bootstrap(ini_location, options=options) |
|
135 | env = bootstrap(ini_location, options=options) | |
136 |
|
136 | |||
137 | setup_celery_app( |
|
137 | setup_celery_app( | |
138 | app=env['app'], root=env['root'], request=env['request'], |
|
138 | app=env['app'], root=env['root'], request=env['request'], | |
139 | registry=env['registry'], closer=env['closer'], |
|
139 | registry=env['registry'], closer=env['closer'], | |
140 | ini_location=ini_location) |
|
140 | ini_location=ini_location) | |
141 |
|
141 | |||
142 | # fix the global flag even if it's disabled via the .ini file because this |
|
142 | # fix the global flag even if it's disabled via the .ini file because this | |
143 | # is worker code that doesn't need this to be disabled. |
|
143 | # is worker code that doesn't need this to be disabled. | |
144 | rhodecode.CELERY_ENABLED = True |
|
144 | rhodecode.CELERY_ENABLED = True | |
145 |
|
145 | |||
146 |
|
146 | |||
147 | @signals.task_prerun.connect |
|
147 | @signals.task_prerun.connect | |
148 | def task_prerun_signal(task_id, task, args, **kwargs): |
|
148 | def task_prerun_signal(task_id, task, args, **kwargs): | |
149 | ping_db() |
|
149 | ping_db() | |
150 |
|
150 | |||
151 |
|
151 | |||
152 | @signals.task_success.connect |
|
152 | @signals.task_success.connect | |
153 | def task_success_signal(result, **kwargs): |
|
153 | def task_success_signal(result, **kwargs): | |
154 | meta.Session.commit() |
|
154 | meta.Session.commit() | |
155 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
155 | closer = celery_app.conf['PYRAMID_CLOSER'] | |
156 | if closer: |
|
156 | if closer: | |
157 | closer() |
|
157 | closer() | |
158 |
|
158 | |||
159 |
|
159 | |||
160 | @signals.task_retry.connect |
|
160 | @signals.task_retry.connect | |
161 | def task_retry_signal( |
|
161 | def task_retry_signal( | |
162 | request, reason, einfo, **kwargs): |
|
162 | request, reason, einfo, **kwargs): | |
163 | meta.Session.remove() |
|
163 | meta.Session.remove() | |
164 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
164 | closer = celery_app.conf['PYRAMID_CLOSER'] | |
165 | if closer: |
|
165 | if closer: | |
166 | closer() |
|
166 | closer() | |
167 |
|
167 | |||
168 |
|
168 | |||
169 | @signals.task_failure.connect |
|
169 | @signals.task_failure.connect | |
170 | def task_failure_signal( |
|
170 | def task_failure_signal( | |
171 | task_id, exception, args, kwargs, traceback, einfo, **kargs): |
|
171 | task_id, exception, args, kwargs, traceback, einfo, **kargs): | |
172 | from rhodecode.lib.exc_tracking import store_exception |
|
172 | from rhodecode.lib.exc_tracking import store_exception | |
173 | from rhodecode.lib.statsd_client import StatsdClient |
|
173 | from rhodecode.lib.statsd_client import StatsdClient | |
174 |
|
174 | |||
175 | meta.Session.remove() |
|
175 | meta.Session.remove() | |
176 |
|
176 | |||
177 | # simulate sys.exc_info() |
|
177 | # simulate sys.exc_info() | |
178 | exc_info = (einfo.type, einfo.exception, einfo.tb) |
|
178 | exc_info = (einfo.type, einfo.exception, einfo.tb) | |
179 | store_exception(id(exc_info), exc_info, prefix='rhodecode-celery') |
|
179 | store_exception(id(exc_info), exc_info, prefix='rhodecode-celery') | |
180 | statsd = StatsdClient.statsd |
|
180 | statsd = StatsdClient.statsd | |
181 | if statsd: |
|
181 | if statsd: | |
182 | statsd.incr('rhodecode_exception', tags=["celery"]) |
|
182 | statsd.incr('rhodecode_exception_total', tags=["exc_source:celery"]) | |
183 |
|
183 | |||
184 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
184 | closer = celery_app.conf['PYRAMID_CLOSER'] | |
185 | if closer: |
|
185 | if closer: | |
186 | closer() |
|
186 | closer() | |
187 |
|
187 | |||
188 |
|
188 | |||
189 | @signals.task_revoked.connect |
|
189 | @signals.task_revoked.connect | |
190 | def task_revoked_signal( |
|
190 | def task_revoked_signal( | |
191 | request, terminated, signum, expired, **kwargs): |
|
191 | request, terminated, signum, expired, **kwargs): | |
192 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
192 | closer = celery_app.conf['PYRAMID_CLOSER'] | |
193 | if closer: |
|
193 | if closer: | |
194 | closer() |
|
194 | closer() | |
195 |
|
195 | |||
196 |
|
196 | |||
197 | def setup_celery_app(app, root, request, registry, closer, ini_location): |
|
197 | def setup_celery_app(app, root, request, registry, closer, ini_location): | |
198 | ini_dir = os.path.dirname(os.path.abspath(ini_location)) |
|
198 | ini_dir = os.path.dirname(os.path.abspath(ini_location)) | |
199 | celery_config = base_celery_config |
|
199 | celery_config = base_celery_config | |
200 | celery_config.update({ |
|
200 | celery_config.update({ | |
201 | # store celerybeat scheduler db where the .ini file is |
|
201 | # store celerybeat scheduler db where the .ini file is | |
202 | 'beat_schedule_filename': os.path.join(ini_dir, 'celerybeat-schedule'), |
|
202 | 'beat_schedule_filename': os.path.join(ini_dir, 'celerybeat-schedule'), | |
203 | }) |
|
203 | }) | |
204 | ini_settings = get_ini_config(ini_location) |
|
204 | ini_settings = get_ini_config(ini_location) | |
205 | log.debug('Got custom celery conf: %s', ini_settings) |
|
205 | log.debug('Got custom celery conf: %s', ini_settings) | |
206 |
|
206 | |||
207 | celery_config.update(ini_settings) |
|
207 | celery_config.update(ini_settings) | |
208 | celery_app.config_from_object(celery_config) |
|
208 | celery_app.config_from_object(celery_config) | |
209 |
|
209 | |||
210 | celery_app.conf.update({'PYRAMID_APP': app}) |
|
210 | celery_app.conf.update({'PYRAMID_APP': app}) | |
211 | celery_app.conf.update({'PYRAMID_ROOT': root}) |
|
211 | celery_app.conf.update({'PYRAMID_ROOT': root}) | |
212 | celery_app.conf.update({'PYRAMID_REQUEST': request}) |
|
212 | celery_app.conf.update({'PYRAMID_REQUEST': request}) | |
213 | celery_app.conf.update({'PYRAMID_REGISTRY': registry}) |
|
213 | celery_app.conf.update({'PYRAMID_REGISTRY': registry}) | |
214 | celery_app.conf.update({'PYRAMID_CLOSER': closer}) |
|
214 | celery_app.conf.update({'PYRAMID_CLOSER': closer}) | |
215 |
|
215 | |||
216 |
|
216 | |||
217 | def configure_celery(config, ini_location): |
|
217 | def configure_celery(config, ini_location): | |
218 | """ |
|
218 | """ | |
219 | Helper that is called from our application creation logic. It gives |
|
219 | Helper that is called from our application creation logic. It gives | |
220 | connection info to the running webapp and allows execution of tasks from |
|
220 | connection info to the running webapp and allows execution of tasks from | |
221 | RhodeCode itself |
|
221 | RhodeCode itself | |
222 | """ |
|
222 | """ | |
223 | # store some globals into rhodecode |
|
223 | # store some globals into rhodecode | |
224 | rhodecode.CELERY_ENABLED = str2bool( |
|
224 | rhodecode.CELERY_ENABLED = str2bool( | |
225 | config.registry.settings.get('use_celery')) |
|
225 | config.registry.settings.get('use_celery')) | |
226 | if rhodecode.CELERY_ENABLED: |
|
226 | if rhodecode.CELERY_ENABLED: | |
227 | log.info('Configuring celery based on `%s` file', ini_location) |
|
227 | log.info('Configuring celery based on `%s` file', ini_location) | |
228 | setup_celery_app( |
|
228 | setup_celery_app( | |
229 | app=None, root=None, request=None, registry=config.registry, |
|
229 | app=None, root=None, request=None, registry=config.registry, | |
230 | closer=None, ini_location=ini_location) |
|
230 | closer=None, ini_location=ini_location) | |
231 |
|
231 | |||
232 |
|
232 | |||
233 | def maybe_prepare_env(req): |
|
233 | def maybe_prepare_env(req): | |
234 | environ = {} |
|
234 | environ = {} | |
235 | try: |
|
235 | try: | |
236 | environ.update({ |
|
236 | environ.update({ | |
237 | 'PATH_INFO': req.environ['PATH_INFO'], |
|
237 | 'PATH_INFO': req.environ['PATH_INFO'], | |
238 | 'SCRIPT_NAME': req.environ['SCRIPT_NAME'], |
|
238 | 'SCRIPT_NAME': req.environ['SCRIPT_NAME'], | |
239 | 'HTTP_HOST':req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']), |
|
239 | 'HTTP_HOST':req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']), | |
240 | 'SERVER_NAME': req.environ['SERVER_NAME'], |
|
240 | 'SERVER_NAME': req.environ['SERVER_NAME'], | |
241 | 'SERVER_PORT': req.environ['SERVER_PORT'], |
|
241 | 'SERVER_PORT': req.environ['SERVER_PORT'], | |
242 | 'wsgi.url_scheme': req.environ['wsgi.url_scheme'], |
|
242 | 'wsgi.url_scheme': req.environ['wsgi.url_scheme'], | |
243 | }) |
|
243 | }) | |
244 | except Exception: |
|
244 | except Exception: | |
245 | pass |
|
245 | pass | |
246 |
|
246 | |||
247 | return environ |
|
247 | return environ | |
248 |
|
248 | |||
249 |
|
249 | |||
250 | class RequestContextTask(Task): |
|
250 | class RequestContextTask(Task): | |
251 | """ |
|
251 | """ | |
252 | This is a celery task which will create a rhodecode app instance context |
|
252 | This is a celery task which will create a rhodecode app instance context | |
253 | for the task, patch pyramid with the original request |
|
253 | for the task, patch pyramid with the original request | |
254 | that created the task and also add the user to the context. |
|
254 | that created the task and also add the user to the context. | |
255 | """ |
|
255 | """ | |
256 |
|
256 | |||
257 | def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, |
|
257 | def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, | |
258 | link=None, link_error=None, shadow=None, **options): |
|
258 | link=None, link_error=None, shadow=None, **options): | |
259 | """ queue the job to run (we are in web request context here) """ |
|
259 | """ queue the job to run (we are in web request context here) """ | |
260 |
|
260 | |||
261 | req = get_current_request() |
|
261 | req = get_current_request() | |
262 |
|
262 | |||
263 | # web case |
|
263 | # web case | |
264 | if hasattr(req, 'user'): |
|
264 | if hasattr(req, 'user'): | |
265 | ip_addr = req.user.ip_addr |
|
265 | ip_addr = req.user.ip_addr | |
266 | user_id = req.user.user_id |
|
266 | user_id = req.user.user_id | |
267 |
|
267 | |||
268 | # api case |
|
268 | # api case | |
269 | elif hasattr(req, 'rpc_user'): |
|
269 | elif hasattr(req, 'rpc_user'): | |
270 | ip_addr = req.rpc_user.ip_addr |
|
270 | ip_addr = req.rpc_user.ip_addr | |
271 | user_id = req.rpc_user.user_id |
|
271 | user_id = req.rpc_user.user_id | |
272 | else: |
|
272 | else: | |
273 | raise Exception( |
|
273 | raise Exception( | |
274 | 'Unable to fetch required data from request: {}. \n' |
|
274 | 'Unable to fetch required data from request: {}. \n' | |
275 | 'This task is required to be executed from context of ' |
|
275 | 'This task is required to be executed from context of ' | |
276 | 'request in a webapp'.format(repr(req))) |
|
276 | 'request in a webapp'.format(repr(req))) | |
277 |
|
277 | |||
278 | if req: |
|
278 | if req: | |
279 | # we hook into kwargs since it is the only way to pass our data to |
|
279 | # we hook into kwargs since it is the only way to pass our data to | |
280 | # the celery worker |
|
280 | # the celery worker | |
281 | environ = maybe_prepare_env(req) |
|
281 | environ = maybe_prepare_env(req) | |
282 | options['headers'] = options.get('headers', {}) |
|
282 | options['headers'] = options.get('headers', {}) | |
283 | options['headers'].update({ |
|
283 | options['headers'].update({ | |
284 | 'rhodecode_proxy_data': { |
|
284 | 'rhodecode_proxy_data': { | |
285 | 'environ': environ, |
|
285 | 'environ': environ, | |
286 | 'auth_user': { |
|
286 | 'auth_user': { | |
287 | 'ip_addr': ip_addr, |
|
287 | 'ip_addr': ip_addr, | |
288 | 'user_id': user_id |
|
288 | 'user_id': user_id | |
289 | }, |
|
289 | }, | |
290 | } |
|
290 | } | |
291 | }) |
|
291 | }) | |
292 |
|
292 | |||
293 | return super(RequestContextTask, self).apply_async( |
|
293 | return super(RequestContextTask, self).apply_async( | |
294 | args, kwargs, task_id, producer, link, link_error, shadow, **options) |
|
294 | args, kwargs, task_id, producer, link, link_error, shadow, **options) | |
295 |
|
295 | |||
296 | def __call__(self, *args, **kwargs): |
|
296 | def __call__(self, *args, **kwargs): | |
297 | """ rebuild the context and then run task on celery worker """ |
|
297 | """ rebuild the context and then run task on celery worker """ | |
298 |
|
298 | |||
299 | proxy_data = getattr(self.request, 'rhodecode_proxy_data', None) |
|
299 | proxy_data = getattr(self.request, 'rhodecode_proxy_data', None) | |
300 | if not proxy_data: |
|
300 | if not proxy_data: | |
301 | return super(RequestContextTask, self).__call__(*args, **kwargs) |
|
301 | return super(RequestContextTask, self).__call__(*args, **kwargs) | |
302 |
|
302 | |||
303 | log.debug('using celery proxy data to run task: %r', proxy_data) |
|
303 | log.debug('using celery proxy data to run task: %r', proxy_data) | |
304 | # re-inject and register threadlocals for proper routing support |
|
304 | # re-inject and register threadlocals for proper routing support | |
305 | request = prepare_request(proxy_data['environ']) |
|
305 | request = prepare_request(proxy_data['environ']) | |
306 | request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'], |
|
306 | request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'], | |
307 | ip_addr=proxy_data['auth_user']['ip_addr']) |
|
307 | ip_addr=proxy_data['auth_user']['ip_addr']) | |
308 |
|
308 | |||
309 | return super(RequestContextTask, self).__call__(*args, **kwargs) |
|
309 | return super(RequestContextTask, self).__call__(*args, **kwargs) | |
310 |
|
310 |
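
For orientation, a sketch of the `rhodecode_proxy_data` header that `apply_async` above attaches and that `__call__` unpacks on the worker; all values here are invented for illustration:

    # assumed example payload, built from req.environ and req.user / req.rpc_user
    rhodecode_proxy_data = {
        'environ': {
            'PATH_INFO': '/_admin/my_account',
            'SCRIPT_NAME': '',
            'HTTP_HOST': 'rhodecode.example.com',
            'SERVER_NAME': 'rhodecode.example.com',
            'SERVER_PORT': '443',
            'wsgi.url_scheme': 'https',
        },
        'auth_user': {'ip_addr': '10.0.0.5', 'user_id': 2},
    }
    # worker side: prepare_request(proxy_data['environ']) re-registers the pyramid
    # threadlocals, then request.user = AuthUser(user_id=..., ip_addr=...)
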
@@ -1,410 +1,414 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | RhodeCode task modules, containing all tasks that are supposed to be run |
|
22 | RhodeCode task modules, containing all tasks that are supposed to be run | |
23 | by the celery daemon |
|
23 | by the celery daemon | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import time |
|
27 | import time | |
28 |
|
28 | |||
29 | from pyramid import compat |
|
29 | from pyramid import compat | |
30 | from pyramid_mailer.mailer import Mailer |
|
30 | from pyramid_mailer.mailer import Mailer | |
31 | from pyramid_mailer.message import Message |
|
31 | from pyramid_mailer.message import Message | |
32 | from email.utils import formatdate |
|
32 | from email.utils import formatdate | |
33 |
|
33 | |||
34 | import rhodecode |
|
34 | import rhodecode | |
35 | from rhodecode.lib import audit_logger |
|
35 | from rhodecode.lib import audit_logger | |
36 | from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task |
|
36 | from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task | |
37 | from rhodecode.lib import hooks_base |
|
37 | from rhodecode.lib import hooks_base | |
38 | from rhodecode.lib.utils2 import safe_int, str2bool, aslist |
|
38 | from rhodecode.lib.utils2 import safe_int, str2bool, aslist | |
|
39 | from rhodecode.lib.statsd_client import StatsdClient | |||
39 | from rhodecode.model.db import ( |
|
40 | from rhodecode.model.db import ( | |
40 | Session, IntegrityError, true, Repository, RepoGroup, User) |
|
41 | Session, IntegrityError, true, Repository, RepoGroup, User) | |
41 | from rhodecode.model.permission import PermissionModel |
|
42 | from rhodecode.model.permission import PermissionModel | |
42 |
|
43 | |||
43 |
|
44 | |||
44 | @async_task(ignore_result=True, base=RequestContextTask) |
|
45 | @async_task(ignore_result=True, base=RequestContextTask) | |
45 | def send_email(recipients, subject, body='', html_body='', email_config=None, |
|
46 | def send_email(recipients, subject, body='', html_body='', email_config=None, | |
46 | extra_headers=None): |
|
47 | extra_headers=None): | |
47 | """ |
|
48 | """ | |
48 | Sends an email with defined parameters from the .ini files. |
|
49 | Sends an email with defined parameters from the .ini files. | |
49 |
|
50 | |||
50 | :param recipients: list of recipients, if this is empty the defined email |
|
51 | :param recipients: list of recipients, if this is empty the defined email | |
51 | address from the 'email_to' field is used instead |
|
52 | address from the 'email_to' field is used instead | |
52 | :param subject: subject of the mail |
|
53 | :param subject: subject of the mail | |
53 | :param body: body of the mail |
|
54 | :param body: body of the mail | |
54 | :param html_body: html version of body |
|
55 | :param html_body: html version of body | |
55 | :param email_config: specify custom configuration for mailer |
|
56 | :param email_config: specify custom configuration for mailer | |
56 | :param extra_headers: specify custom headers |
|
57 | :param extra_headers: specify custom headers | |
57 | """ |
|
58 | """ | |
58 | log = get_logger(send_email) |
|
59 | log = get_logger(send_email) | |
59 |
|
60 | |||
60 | email_config = email_config or rhodecode.CONFIG |
|
61 | email_config = email_config or rhodecode.CONFIG | |
61 |
|
62 | |||
62 | mail_server = email_config.get('smtp_server') or None |
|
63 | mail_server = email_config.get('smtp_server') or None | |
63 | if mail_server is None: |
|
64 | if mail_server is None: | |
64 | log.error("SMTP server information missing. Sending email failed. " |
|
65 | log.error("SMTP server information missing. Sending email failed. " | |
65 | "Make sure that `smtp_server` variable is configured " |
|
66 | "Make sure that `smtp_server` variable is configured " | |
66 | "inside the .ini file") |
|
67 | "inside the .ini file") | |
67 | return False |
|
68 | return False | |
68 |
|
69 | |||
69 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) |
|
70 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) | |
70 |
|
71 | |||
71 | if recipients: |
|
72 | if recipients: | |
72 | if isinstance(recipients, compat.string_types): |
|
73 | if isinstance(recipients, compat.string_types): | |
73 | recipients = recipients.split(',') |
|
74 | recipients = recipients.split(',') | |
74 | else: |
|
75 | else: | |
75 | # if recipients are not defined we send to email_config + all admins |
|
76 | # if recipients are not defined we send to email_config + all admins | |
76 | admins = [] |
|
77 | admins = [] | |
77 | for u in User.query().filter(User.admin == true()).all(): |
|
78 | for u in User.query().filter(User.admin == true()).all(): | |
78 | if u.email: |
|
79 | if u.email: | |
79 | admins.append(u.email) |
|
80 | admins.append(u.email) | |
80 | recipients = [] |
|
81 | recipients = [] | |
81 | config_email = email_config.get('email_to') |
|
82 | config_email = email_config.get('email_to') | |
82 | if config_email: |
|
83 | if config_email: | |
83 | recipients += [config_email] |
|
84 | recipients += [config_email] | |
84 | recipients += admins |
|
85 | recipients += admins | |
85 |
|
86 | |||
86 | # translate our LEGACY config into the one that pyramid_mailer supports |
|
87 | # translate our LEGACY config into the one that pyramid_mailer supports | |
87 | email_conf = dict( |
|
88 | email_conf = dict( | |
88 | host=mail_server, |
|
89 | host=mail_server, | |
89 | port=email_config.get('smtp_port', 25), |
|
90 | port=email_config.get('smtp_port', 25), | |
90 | username=email_config.get('smtp_username'), |
|
91 | username=email_config.get('smtp_username'), | |
91 | password=email_config.get('smtp_password'), |
|
92 | password=email_config.get('smtp_password'), | |
92 |
|
93 | |||
93 | tls=str2bool(email_config.get('smtp_use_tls')), |
|
94 | tls=str2bool(email_config.get('smtp_use_tls')), | |
94 | ssl=str2bool(email_config.get('smtp_use_ssl')), |
|
95 | ssl=str2bool(email_config.get('smtp_use_ssl')), | |
95 |
|
96 | |||
96 | # SSL key file |
|
97 | # SSL key file | |
97 | # keyfile='', |
|
98 | # keyfile='', | |
98 |
|
99 | |||
99 | # SSL certificate file |
|
100 | # SSL certificate file | |
100 | # certfile='', |
|
101 | # certfile='', | |
101 |
|
102 | |||
102 | # Location of maildir |
|
103 | # Location of maildir | |
103 | # queue_path='', |
|
104 | # queue_path='', | |
104 |
|
105 | |||
105 | default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'), |
|
106 | default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'), | |
106 |
|
107 | |||
107 | debug=str2bool(email_config.get('smtp_debug')), |
|
108 | debug=str2bool(email_config.get('smtp_debug')), | |
108 | # /usr/sbin/sendmail Sendmail executable |
|
109 | # /usr/sbin/sendmail Sendmail executable | |
109 | # sendmail_app='', |
|
110 | # sendmail_app='', | |
110 |
|
111 | |||
111 | # {sendmail_app} -t -i -f {sender} Template for sendmail execution |
|
112 | # {sendmail_app} -t -i -f {sender} Template for sendmail execution | |
112 | # sendmail_template='', |
|
113 | # sendmail_template='', | |
113 | ) |
|
114 | ) | |
114 |
|
115 | |||
115 | if extra_headers is None: |
|
116 | if extra_headers is None: | |
116 | extra_headers = {} |
|
117 | extra_headers = {} | |
117 |
|
118 | |||
118 | extra_headers.setdefault('Date', formatdate(time.time())) |
|
119 | extra_headers.setdefault('Date', formatdate(time.time())) | |
119 |
|
120 | |||
120 | if 'thread_ids' in extra_headers: |
|
121 | if 'thread_ids' in extra_headers: | |
121 | thread_ids = extra_headers.pop('thread_ids') |
|
122 | thread_ids = extra_headers.pop('thread_ids') | |
122 | extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids) |
|
123 | extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids) | |
123 |
|
124 | |||
124 | try: |
|
125 | try: | |
125 | mailer = Mailer(**email_conf) |
|
126 | mailer = Mailer(**email_conf) | |
126 |
|
127 | |||
127 | message = Message(subject=subject, |
|
128 | message = Message(subject=subject, | |
128 | sender=email_conf['default_sender'], |
|
129 | sender=email_conf['default_sender'], | |
129 | recipients=recipients, |
|
130 | recipients=recipients, | |
130 | body=body, html=html_body, |
|
131 | body=body, html=html_body, | |
131 | extra_headers=extra_headers) |
|
132 | extra_headers=extra_headers) | |
132 | mailer.send_immediately(message) |
|
133 | mailer.send_immediately(message) | |
|
134 | statsd = StatsdClient.statsd | |||
|
135 | if statsd: | |||
|
136 | statsd.incr('rhodecode_email_sent_total') | |||
133 |
|
137 | |||
134 | except Exception: |
|
138 | except Exception: | |
135 | log.exception('Mail sending failed') |
|
139 | log.exception('Mail sending failed') | |
136 | return False |
|
140 | return False | |
137 | return True |
|
141 | return True | |
138 |
|
142 | |||
139 |
|
143 | |||
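
The counter added above follows the pattern this changeset applies to the other metrics (`rhodecode_celery_task_total`, `rhodecode_exception_total` with an `exc_source:celery` tag): counter names gain a `_total` suffix and every call is guarded, since `StatsdClient.statsd` is None when no statsd backend is configured. A minimal sketch of that guard pattern outside the mailer code:

    from rhodecode.lib.statsd_client import StatsdClient

    statsd = StatsdClient.statsd
    if statsd:
        # tags are optional; this mirrors the style used elsewhere in the diff
        statsd.incr('rhodecode_email_sent_total')
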
140 | @async_task(ignore_result=True, base=RequestContextTask) |
|
144 | @async_task(ignore_result=True, base=RequestContextTask) | |
141 | def create_repo(form_data, cur_user): |
|
145 | def create_repo(form_data, cur_user): | |
142 | from rhodecode.model.repo import RepoModel |
|
146 | from rhodecode.model.repo import RepoModel | |
143 | from rhodecode.model.user import UserModel |
|
147 | from rhodecode.model.user import UserModel | |
144 | from rhodecode.model.scm import ScmModel |
|
148 | from rhodecode.model.scm import ScmModel | |
145 | from rhodecode.model.settings import SettingsModel |
|
149 | from rhodecode.model.settings import SettingsModel | |
146 |
|
150 | |||
147 | log = get_logger(create_repo) |
|
151 | log = get_logger(create_repo) | |
148 |
|
152 | |||
149 | cur_user = UserModel()._get_user(cur_user) |
|
153 | cur_user = UserModel()._get_user(cur_user) | |
150 | owner = cur_user |
|
154 | owner = cur_user | |
151 |
|
155 | |||
152 | repo_name = form_data['repo_name'] |
|
156 | repo_name = form_data['repo_name'] | |
153 | repo_name_full = form_data['repo_name_full'] |
|
157 | repo_name_full = form_data['repo_name_full'] | |
154 | repo_type = form_data['repo_type'] |
|
158 | repo_type = form_data['repo_type'] | |
155 | description = form_data['repo_description'] |
|
159 | description = form_data['repo_description'] | |
156 | private = form_data['repo_private'] |
|
160 | private = form_data['repo_private'] | |
157 | clone_uri = form_data.get('clone_uri') |
|
161 | clone_uri = form_data.get('clone_uri') | |
158 | repo_group = safe_int(form_data['repo_group']) |
|
162 | repo_group = safe_int(form_data['repo_group']) | |
159 | copy_fork_permissions = form_data.get('copy_permissions') |
|
163 | copy_fork_permissions = form_data.get('copy_permissions') | |
160 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
164 | copy_group_permissions = form_data.get('repo_copy_permissions') | |
161 | fork_of = form_data.get('fork_parent_id') |
|
165 | fork_of = form_data.get('fork_parent_id') | |
162 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
166 | state = form_data.get('repo_state', Repository.STATE_PENDING) | |
163 |
|
167 | |||
164 | # repo creation defaults, private and repo_type are filled in form |
|
168 | # repo creation defaults, private and repo_type are filled in form | |
165 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
169 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) | |
166 | enable_statistics = form_data.get( |
|
170 | enable_statistics = form_data.get( | |
167 | 'enable_statistics', defs.get('repo_enable_statistics')) |
|
171 | 'enable_statistics', defs.get('repo_enable_statistics')) | |
168 | enable_locking = form_data.get( |
|
172 | enable_locking = form_data.get( | |
169 | 'enable_locking', defs.get('repo_enable_locking')) |
|
173 | 'enable_locking', defs.get('repo_enable_locking')) | |
170 | enable_downloads = form_data.get( |
|
174 | enable_downloads = form_data.get( | |
171 | 'enable_downloads', defs.get('repo_enable_downloads')) |
|
175 | 'enable_downloads', defs.get('repo_enable_downloads')) | |
172 |
|
176 | |||
173 | # set landing rev based on default branches for SCM |
|
177 | # set landing rev based on default branches for SCM | |
174 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) |
|
178 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) | |
175 |
|
179 | |||
176 | try: |
|
180 | try: | |
177 | RepoModel()._create_repo( |
|
181 | RepoModel()._create_repo( | |
178 | repo_name=repo_name_full, |
|
182 | repo_name=repo_name_full, | |
179 | repo_type=repo_type, |
|
183 | repo_type=repo_type, | |
180 | description=description, |
|
184 | description=description, | |
181 | owner=owner, |
|
185 | owner=owner, | |
182 | private=private, |
|
186 | private=private, | |
183 | clone_uri=clone_uri, |
|
187 | clone_uri=clone_uri, | |
184 | repo_group=repo_group, |
|
188 | repo_group=repo_group, | |
185 | landing_rev=landing_ref, |
|
189 | landing_rev=landing_ref, | |
186 | fork_of=fork_of, |
|
190 | fork_of=fork_of, | |
187 | copy_fork_permissions=copy_fork_permissions, |
|
191 | copy_fork_permissions=copy_fork_permissions, | |
188 | copy_group_permissions=copy_group_permissions, |
|
192 | copy_group_permissions=copy_group_permissions, | |
189 | enable_statistics=enable_statistics, |
|
193 | enable_statistics=enable_statistics, | |
190 | enable_locking=enable_locking, |
|
194 | enable_locking=enable_locking, | |
191 | enable_downloads=enable_downloads, |
|
195 | enable_downloads=enable_downloads, | |
192 | state=state |
|
196 | state=state | |
193 | ) |
|
197 | ) | |
194 | Session().commit() |
|
198 | Session().commit() | |
195 |
|
199 | |||
196 | # now create this repo on Filesystem |
|
200 | # now create this repo on Filesystem | |
197 | RepoModel()._create_filesystem_repo( |
|
201 | RepoModel()._create_filesystem_repo( | |
198 | repo_name=repo_name, |
|
202 | repo_name=repo_name, | |
199 | repo_type=repo_type, |
|
203 | repo_type=repo_type, | |
200 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
204 | repo_group=RepoModel()._get_repo_group(repo_group), | |
201 | clone_uri=clone_uri, |
|
205 | clone_uri=clone_uri, | |
202 | ) |
|
206 | ) | |
203 | repo = Repository.get_by_repo_name(repo_name_full) |
|
207 | repo = Repository.get_by_repo_name(repo_name_full) | |
204 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) |
|
208 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) | |
205 |
|
209 | |||
206 | # update repo commit caches initially |
|
210 | # update repo commit caches initially | |
207 | repo.update_commit_cache() |
|
211 | repo.update_commit_cache() | |
208 |
|
212 | |||
209 | # set new created state |
|
213 | # set new created state | |
210 | repo.set_state(Repository.STATE_CREATED) |
|
214 | repo.set_state(Repository.STATE_CREATED) | |
211 | repo_id = repo.repo_id |
|
215 | repo_id = repo.repo_id | |
212 | repo_data = repo.get_api_data() |
|
216 | repo_data = repo.get_api_data() | |
213 |
|
217 | |||
214 | audit_logger.store( |
|
218 | audit_logger.store( | |
215 | 'repo.create', action_data={'data': repo_data}, |
|
219 | 'repo.create', action_data={'data': repo_data}, | |
216 | user=cur_user, |
|
220 | user=cur_user, | |
217 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
221 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) | |
218 |
|
222 | |||
219 | Session().commit() |
|
223 | Session().commit() | |
220 |
|
224 | |||
221 | PermissionModel().trigger_permission_flush() |
|
225 | PermissionModel().trigger_permission_flush() | |
222 |
|
226 | |||
223 | except Exception as e: |
|
227 | except Exception as e: | |
224 | log.warning('Exception occurred when creating repository, ' |
|
228 | log.warning('Exception occurred when creating repository, ' | |
225 | 'doing cleanup...', exc_info=True) |
|
229 | 'doing cleanup...', exc_info=True) | |
226 | if isinstance(e, IntegrityError): |
|
230 | if isinstance(e, IntegrityError): | |
227 | Session().rollback() |
|
231 | Session().rollback() | |
228 |
|
232 | |||
229 | # rollback things manually ! |
|
233 | # rollback things manually ! | |
230 | repo = Repository.get_by_repo_name(repo_name_full) |
|
234 | repo = Repository.get_by_repo_name(repo_name_full) | |
231 | if repo: |
|
235 | if repo: | |
232 | Repository.delete(repo.repo_id) |
|
236 | Repository.delete(repo.repo_id) | |
233 | Session().commit() |
|
237 | Session().commit() | |
234 | RepoModel()._delete_filesystem_repo(repo) |
|
238 | RepoModel()._delete_filesystem_repo(repo) | |
235 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
239 | log.info('Cleanup of repo %s finished', repo_name_full) | |
236 | raise |
|
240 | raise | |
237 |
|
241 | |||
238 | return True |
|
242 | return True | |
239 |
|
243 | |||
240 |
|
244 | |||
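
A sketch of how the web layer could queue the task above through `run_task`; the field values are invented for illustration, but the keys mirror the ones `create_repo` reads from `form_data`:

    from rhodecode.lib.celerylib import run_task
    from rhodecode.lib.celerylib.tasks import create_repo

    form_data = {
        'repo_name': 'my-repo',
        'repo_name_full': 'my-group/my-repo',
        'repo_type': 'git',
        'repo_description': 'example repository',
        'repo_private': False,
        'repo_group': 4,   # numeric group id; safe_int() is applied inside the task
    }
    # cur_user is resolved via UserModel()._get_user(); an id or username works here
    run_task(create_repo, form_data, 'admin')
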
241 | @async_task(ignore_result=True, base=RequestContextTask) |
|
245 | @async_task(ignore_result=True, base=RequestContextTask) | |
242 | def create_repo_fork(form_data, cur_user): |
|
246 | def create_repo_fork(form_data, cur_user): | |
243 | """ |
|
247 | """ | |
244 | Creates a fork of a repository using internal VCS methods |
|
248 | Creates a fork of a repository using internal VCS methods | |
245 | """ |
|
249 | """ | |
246 | from rhodecode.model.repo import RepoModel |
|
250 | from rhodecode.model.repo import RepoModel | |
247 | from rhodecode.model.user import UserModel |
|
251 | from rhodecode.model.user import UserModel | |
248 |
|
252 | |||
249 | log = get_logger(create_repo_fork) |
|
253 | log = get_logger(create_repo_fork) | |
250 |
|
254 | |||
251 | cur_user = UserModel()._get_user(cur_user) |
|
255 | cur_user = UserModel()._get_user(cur_user) | |
252 | owner = cur_user |
|
256 | owner = cur_user | |
253 |
|
257 | |||
254 | repo_name = form_data['repo_name'] # fork in this case |
|
258 | repo_name = form_data['repo_name'] # fork in this case | |
255 | repo_name_full = form_data['repo_name_full'] |
|
259 | repo_name_full = form_data['repo_name_full'] | |
256 | repo_type = form_data['repo_type'] |
|
260 | repo_type = form_data['repo_type'] | |
257 | description = form_data['description'] |
|
261 | description = form_data['description'] | |
258 | private = form_data['private'] |
|
262 | private = form_data['private'] | |
259 | clone_uri = form_data.get('clone_uri') |
|
263 | clone_uri = form_data.get('clone_uri') | |
260 | repo_group = safe_int(form_data['repo_group']) |
|
264 | repo_group = safe_int(form_data['repo_group']) | |
261 | landing_ref = form_data['landing_rev'] |
|
265 | landing_ref = form_data['landing_rev'] | |
262 | copy_fork_permissions = form_data.get('copy_permissions') |
|
266 | copy_fork_permissions = form_data.get('copy_permissions') | |
263 | fork_id = safe_int(form_data.get('fork_parent_id')) |
|
267 | fork_id = safe_int(form_data.get('fork_parent_id')) | |
264 |
|
268 | |||
265 | try: |
|
269 | try: | |
266 | fork_of = RepoModel()._get_repo(fork_id) |
|
270 | fork_of = RepoModel()._get_repo(fork_id) | |
267 | RepoModel()._create_repo( |
|
271 | RepoModel()._create_repo( | |
268 | repo_name=repo_name_full, |
|
272 | repo_name=repo_name_full, | |
269 | repo_type=repo_type, |
|
273 | repo_type=repo_type, | |
270 | description=description, |
|
274 | description=description, | |
271 | owner=owner, |
|
275 | owner=owner, | |
272 | private=private, |
|
276 | private=private, | |
273 | clone_uri=clone_uri, |
|
277 | clone_uri=clone_uri, | |
274 | repo_group=repo_group, |
|
278 | repo_group=repo_group, | |
275 | landing_rev=landing_ref, |
|
279 | landing_rev=landing_ref, | |
276 | fork_of=fork_of, |
|
280 | fork_of=fork_of, | |
277 | copy_fork_permissions=copy_fork_permissions |
|
281 | copy_fork_permissions=copy_fork_permissions | |
278 | ) |
|
282 | ) | |
279 |
|
283 | |||
280 | Session().commit() |
|
284 | Session().commit() | |
281 |
|
285 | |||
282 | base_path = Repository.base_path() |
|
286 | base_path = Repository.base_path() | |
283 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
287 | source_repo_path = os.path.join(base_path, fork_of.repo_name) | |
284 |
|
288 | |||
285 | # now create this repo on Filesystem |
|
289 | # now create this repo on Filesystem | |
286 | RepoModel()._create_filesystem_repo( |
|
290 | RepoModel()._create_filesystem_repo( | |
287 | repo_name=repo_name, |
|
291 | repo_name=repo_name, | |
288 | repo_type=repo_type, |
|
292 | repo_type=repo_type, | |
289 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
293 | repo_group=RepoModel()._get_repo_group(repo_group), | |
290 | clone_uri=source_repo_path, |
|
294 | clone_uri=source_repo_path, | |
291 | ) |
|
295 | ) | |
292 | repo = Repository.get_by_repo_name(repo_name_full) |
|
296 | repo = Repository.get_by_repo_name(repo_name_full) | |
293 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) |
|
297 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) | |
294 |
|
298 | |||
295 | # update repo commit caches initially |
|
299 | # update repo commit caches initially | |
296 | config = repo._config |
|
300 | config = repo._config | |
297 | config.set('extensions', 'largefiles', '') |
|
301 | config.set('extensions', 'largefiles', '') | |
298 | repo.update_commit_cache(config=config) |
|
302 | repo.update_commit_cache(config=config) | |
299 |
|
303 | |||
300 | # set new created state |
|
304 | # set new created state | |
301 | repo.set_state(Repository.STATE_CREATED) |
|
305 | repo.set_state(Repository.STATE_CREATED) | |
302 |
|
306 | |||
303 | repo_id = repo.repo_id |
|
307 | repo_id = repo.repo_id | |
304 | repo_data = repo.get_api_data() |
|
308 | repo_data = repo.get_api_data() | |
305 | audit_logger.store( |
|
309 | audit_logger.store( | |
306 | 'repo.fork', action_data={'data': repo_data}, |
|
310 | 'repo.fork', action_data={'data': repo_data}, | |
307 | user=cur_user, |
|
311 | user=cur_user, | |
308 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
312 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) | |
309 |
|
313 | |||
310 | Session().commit() |
|
314 | Session().commit() | |
311 | except Exception as e: |
|
315 | except Exception as e: | |
312 | log.warning('Exception occurred when forking repository, ' |
|
316 | log.warning('Exception occurred when forking repository, ' | |
313 | 'doing cleanup...', exc_info=True) |
|
317 | 'doing cleanup...', exc_info=True) | |
314 | if isinstance(e, IntegrityError): |
|
318 | if isinstance(e, IntegrityError): | |
315 | Session().rollback() |
|
319 | Session().rollback() | |
316 |
|
320 | |||
317 | # rollback things manually ! |
|
321 | # rollback things manually ! | |
318 | repo = Repository.get_by_repo_name(repo_name_full) |
|
322 | repo = Repository.get_by_repo_name(repo_name_full) | |
319 | if repo: |
|
323 | if repo: | |
320 | Repository.delete(repo.repo_id) |
|
324 | Repository.delete(repo.repo_id) | |
321 | Session().commit() |
|
325 | Session().commit() | |
322 | RepoModel()._delete_filesystem_repo(repo) |
|
326 | RepoModel()._delete_filesystem_repo(repo) | |
323 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
327 | log.info('Cleanup of repo %s finished', repo_name_full) | |
324 | raise |
|
328 | raise | |
325 |
|
329 | |||
326 | return True |
|
330 | return True | |
327 |
|
331 | |||
328 |
|
332 | |||
329 | @async_task(ignore_result=True) |
|
333 | @async_task(ignore_result=True) | |
330 | def repo_maintenance(repoid): |
|
334 | def repo_maintenance(repoid): | |
331 | from rhodecode.lib import repo_maintenance as repo_maintenance_lib |
|
335 | from rhodecode.lib import repo_maintenance as repo_maintenance_lib | |
332 | log = get_logger(repo_maintenance) |
|
336 | log = get_logger(repo_maintenance) | |
333 | repo = Repository.get_by_id_or_repo_name(repoid) |
|
337 | repo = Repository.get_by_id_or_repo_name(repoid) | |
334 | if repo: |
|
338 | if repo: | |
335 | maintenance = repo_maintenance_lib.RepoMaintenance() |
|
339 | maintenance = repo_maintenance_lib.RepoMaintenance() | |
336 | tasks = maintenance.get_tasks_for_repo(repo) |
|
340 | tasks = maintenance.get_tasks_for_repo(repo) | |
337 | log.debug('Executing %s tasks on repo `%s`', tasks, repoid) |
|
341 | log.debug('Executing %s tasks on repo `%s`', tasks, repoid) | |
338 | executed_types = maintenance.execute(repo) |
|
342 | executed_types = maintenance.execute(repo) | |
339 | log.debug('Got execution results %s', executed_types) |
|
343 | log.debug('Got execution results %s', executed_types) | |
340 | else: |
|
344 | else: | |
341 | log.debug('Repo `%s` not found or without a clone_url', repoid) |
|
345 | log.debug('Repo `%s` not found or without a clone_url', repoid) | |
342 |
|
346 | |||
343 |
|
347 | |||
344 | @async_task(ignore_result=True) |
|
348 | @async_task(ignore_result=True) | |
345 | def check_for_update(send_email_notification=True, email_recipients=None): |
|
349 | def check_for_update(send_email_notification=True, email_recipients=None): | |
346 | from rhodecode.model.update import UpdateModel |
|
350 | from rhodecode.model.update import UpdateModel | |
347 | from rhodecode.model.notification import EmailNotificationModel |
|
351 | from rhodecode.model.notification import EmailNotificationModel | |
348 |
|
352 | |||
349 | log = get_logger(check_for_update) |
|
353 | log = get_logger(check_for_update) | |
350 | update_url = UpdateModel().get_update_url() |
|
354 | update_url = UpdateModel().get_update_url() | |
351 | cur_ver = rhodecode.__version__ |
|
355 | cur_ver = rhodecode.__version__ | |
352 |
|
356 | |||
353 | try: |
|
357 | try: | |
354 | data = UpdateModel().get_update_data(update_url) |
|
358 | data = UpdateModel().get_update_data(update_url) | |
355 |
|
359 | |||
356 | current_ver = UpdateModel().get_stored_version(fallback=cur_ver) |
|
360 | current_ver = UpdateModel().get_stored_version(fallback=cur_ver) | |
357 | latest_ver = data['versions'][0]['version'] |
|
361 | latest_ver = data['versions'][0]['version'] | |
358 | UpdateModel().store_version(latest_ver) |
|
362 | UpdateModel().store_version(latest_ver) | |
359 |
|
363 | |||
360 | if send_email_notification: |
|
364 | if send_email_notification: | |
361 | log.debug('Send email notification is enabled. ' |
|
365 | log.debug('Send email notification is enabled. ' | |
362 | 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver) |
|
366 | 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver) | |
363 | if UpdateModel().is_outdated(current_ver, latest_ver): |
|
367 | if UpdateModel().is_outdated(current_ver, latest_ver): | |
364 |
|
368 | |||
365 | email_kwargs = { |
|
369 | email_kwargs = { | |
366 | 'current_ver': current_ver, |
|
370 | 'current_ver': current_ver, | |
367 | 'latest_ver': latest_ver, |
|
371 | 'latest_ver': latest_ver, | |
368 | } |
|
372 | } | |
369 |
|
373 | |||
370 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
374 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( | |
371 | EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs) |
|
375 | EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs) | |
372 |
|
376 | |||
373 | email_recipients = aslist(email_recipients, sep=',') or \ |
|
377 | email_recipients = aslist(email_recipients, sep=',') or \ | |
374 | [user.email for user in User.get_all_super_admins()] |
|
378 | [user.email for user in User.get_all_super_admins()] | |
375 | run_task(send_email, email_recipients, subject, |
|
379 | run_task(send_email, email_recipients, subject, | |
376 | email_body_plaintext, email_body) |
|
380 | email_body_plaintext, email_body) | |
377 |
|
381 | |||
378 | except Exception: |
|
382 | except Exception: | |
379 | pass |
|
383 | pass | |
380 |
|
384 | |||
381 |
|
385 | |||
382 | @async_task(ignore_result=False) |
|
386 | @async_task(ignore_result=False) | |
383 | def beat_check(*args, **kwargs): |
|
387 | def beat_check(*args, **kwargs): | |
384 | log = get_logger(beat_check) |
|
388 | log = get_logger(beat_check) | |
385 | log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs) |
|
389 | log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs) | |
386 | return time.time() |
|
390 | return time.time() | |
387 |
|
391 | |||
388 |
|
392 | |||
389 | def sync_last_update_for_objects(*args, **kwargs): |
|
393 | def sync_last_update_for_objects(*args, **kwargs): | |
390 | skip_repos = kwargs.get('skip_repos') |
|
394 | skip_repos = kwargs.get('skip_repos') | |
391 | if not skip_repos: |
|
395 | if not skip_repos: | |
392 | repos = Repository.query() \ |
|
396 | repos = Repository.query() \ | |
393 | .order_by(Repository.group_id.asc()) |
|
397 | .order_by(Repository.group_id.asc()) | |
394 |
|
398 | |||
395 | for repo in repos: |
|
399 | for repo in repos: | |
396 | repo.update_commit_cache() |
|
400 | repo.update_commit_cache() | |
397 |
|
401 | |||
398 | skip_groups = kwargs.get('skip_groups') |
|
402 | skip_groups = kwargs.get('skip_groups') | |
399 | if not skip_groups: |
|
403 | if not skip_groups: | |
400 | repo_groups = RepoGroup.query() \ |
|
404 | repo_groups = RepoGroup.query() \ | |
401 | .filter(RepoGroup.group_parent_id == None) |
|
405 | .filter(RepoGroup.group_parent_id == None) | |
402 |
|
406 | |||
403 | for root_gr in repo_groups: |
|
407 | for root_gr in repo_groups: | |
404 | for repo_gr in reversed(root_gr.recursive_groups()): |
|
408 | for repo_gr in reversed(root_gr.recursive_groups()): | |
405 | repo_gr.update_commit_cache() |
|
409 | repo_gr.update_commit_cache() | |
406 |
|
410 | |||
407 |
|
411 | |||
408 | @async_task(ignore_result=True) |
|
412 | @async_task(ignore_result=True) | |
409 | def sync_last_update(*args, **kwargs): |
|
413 | def sync_last_update(*args, **kwargs): | |
410 | sync_last_update_for_objects(*args, **kwargs) |
|
414 | sync_last_update_for_objects(*args, **kwargs) |
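Note: `sync_last_update_for_objects` above accepts optional `skip_repos` and `skip_groups` keyword arguments to limit the cache refresh to repositories or repository groups, and `sync_last_update` is just its `@async_task`-wrapped entry point. A short usage sketch follows; the import path is an assumption for illustration and is not stated in this changeset.

# Usage sketch (illustrative only): the import path below is assumed,
# adjust it to the Celery tasks module shown in the diff above.
from rhodecode.lib.celerylib.tasks import sync_last_update_for_objects

# Refresh commit caches for all repositories, but skip repository groups.
sync_last_update_for_objects(skip_groups=True)

# Conversely, refresh only the repository-group caches.
sync_last_update_for_objects(skip_repos=True)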
@@ -1,535 +1,535 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2013-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2013-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | Set of hooks run by RhodeCode Enterprise |
|
23 | Set of hooks run by RhodeCode Enterprise | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 |
|
28 | |||
29 | import rhodecode |
|
29 | import rhodecode | |
30 | from rhodecode import events |
|
30 | from rhodecode import events | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib import audit_logger |
|
32 | from rhodecode.lib import audit_logger | |
33 | from rhodecode.lib.utils2 import safe_str |
|
33 | from rhodecode.lib.utils2 import safe_str | |
34 | from rhodecode.lib.exceptions import ( |
|
34 | from rhodecode.lib.exceptions import ( | |
35 | HTTPLockedRC, HTTPBranchProtected, UserCreationError) |
|
35 | HTTPLockedRC, HTTPBranchProtected, UserCreationError) | |
36 | from rhodecode.model.db import Repository, User |
|
36 | from rhodecode.model.db import Repository, User | |
37 | from rhodecode.lib.statsd_client import StatsdClient |
|
37 | from rhodecode.lib.statsd_client import StatsdClient | |
38 |
|
38 | |||
39 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | class HookResponse(object): |
|
42 | class HookResponse(object): | |
43 | def __init__(self, status, output): |
|
43 | def __init__(self, status, output): | |
44 | self.status = status |
|
44 | self.status = status | |
45 | self.output = output |
|
45 | self.output = output | |
46 |
|
46 | |||
47 | def __add__(self, other): |
|
47 | def __add__(self, other): | |
48 | other_status = getattr(other, 'status', 0) |
|
48 | other_status = getattr(other, 'status', 0) | |
49 | new_status = max(self.status, other_status) |
|
49 | new_status = max(self.status, other_status) | |
50 | other_output = getattr(other, 'output', '') |
|
50 | other_output = getattr(other, 'output', '') | |
51 | new_output = self.output + other_output |
|
51 | new_output = self.output + other_output | |
52 |
|
52 | |||
53 | return HookResponse(new_status, new_output) |
|
53 | return HookResponse(new_status, new_output) | |
54 |
|
54 | |||
55 | def __bool__(self): |
|
55 | def __bool__(self): | |
56 | return self.status == 0 |
|
56 | return self.status == 0 | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | def is_shadow_repo(extras): |
|
59 | def is_shadow_repo(extras): | |
60 | """ |
|
60 | """ | |
61 | Returns ``True`` if this is an action executed against a shadow repository. |
|
61 | Returns ``True`` if this is an action executed against a shadow repository. | |
62 | """ |
|
62 | """ | |
63 | return extras['is_shadow_repo'] |
|
63 | return extras['is_shadow_repo'] | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | def _get_scm_size(alias, root_path): |
|
66 | def _get_scm_size(alias, root_path): | |
67 |
|
67 | |||
68 | if not alias.startswith('.'): |
|
68 | if not alias.startswith('.'): | |
69 | alias += '.' |
|
69 | alias += '.' | |
70 |
|
70 | |||
71 | size_scm, size_root = 0, 0 |
|
71 | size_scm, size_root = 0, 0 | |
72 | for path, unused_dirs, files in os.walk(safe_str(root_path)): |
|
72 | for path, unused_dirs, files in os.walk(safe_str(root_path)): | |
73 | if path.find(alias) != -1: |
|
73 | if path.find(alias) != -1: | |
74 | for f in files: |
|
74 | for f in files: | |
75 | try: |
|
75 | try: | |
76 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
76 | size_scm += os.path.getsize(os.path.join(path, f)) | |
77 | except OSError: |
|
77 | except OSError: | |
78 | pass |
|
78 | pass | |
79 | else: |
|
79 | else: | |
80 | for f in files: |
|
80 | for f in files: | |
81 | try: |
|
81 | try: | |
82 | size_root += os.path.getsize(os.path.join(path, f)) |
|
82 | size_root += os.path.getsize(os.path.join(path, f)) | |
83 | except OSError: |
|
83 | except OSError: | |
84 | pass |
|
84 | pass | |
85 |
|
85 | |||
86 | size_scm_f = h.format_byte_size_binary(size_scm) |
|
86 | size_scm_f = h.format_byte_size_binary(size_scm) | |
87 | size_root_f = h.format_byte_size_binary(size_root) |
|
87 | size_root_f = h.format_byte_size_binary(size_root) | |
88 | size_total_f = h.format_byte_size_binary(size_root + size_scm) |
|
88 | size_total_f = h.format_byte_size_binary(size_root + size_scm) | |
89 |
|
89 | |||
90 | return size_scm_f, size_root_f, size_total_f |
|
90 | return size_scm_f, size_root_f, size_total_f | |
91 |
|
91 | |||
92 |
|
92 | |||
93 | # actual hooks called by Mercurial internally, and GIT by our Python Hooks |
|
93 | # actual hooks called by Mercurial internally, and GIT by our Python Hooks | |
94 | def repo_size(extras): |
|
94 | def repo_size(extras): | |
95 | """Present size of repository after push.""" |
|
95 | """Present size of repository after push.""" | |
96 | repo = Repository.get_by_repo_name(extras.repository) |
|
96 | repo = Repository.get_by_repo_name(extras.repository) | |
97 | vcs_part = safe_str(u'.%s' % repo.repo_type) |
|
97 | vcs_part = safe_str(u'.%s' % repo.repo_type) | |
98 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, |
|
98 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, | |
99 | repo.repo_full_path) |
|
99 | repo.repo_full_path) | |
100 | msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n' |
|
100 | msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n' | |
101 | % (repo.repo_name, vcs_part, size_vcs, size_root, size_total)) |
|
101 | % (repo.repo_name, vcs_part, size_vcs, size_root, size_total)) | |
102 | return HookResponse(0, msg) |
|
102 | return HookResponse(0, msg) | |
103 |
|
103 | |||
104 |
|
104 | |||
105 | def pre_push(extras): |
|
105 | def pre_push(extras): | |
106 | """ |
|
106 | """ | |
107 | Hook executed before pushing code. |
|
107 | Hook executed before pushing code. | |
108 |
|
108 | |||
109 | It bans pushing when the repository is locked. |
|
109 | It bans pushing when the repository is locked. | |
110 | """ |
|
110 | """ | |
111 |
|
111 | |||
112 | user = User.get_by_username(extras.username) |
|
112 | user = User.get_by_username(extras.username) | |
113 | output = '' |
|
113 | output = '' | |
114 | if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): |
|
114 | if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): | |
115 | locked_by = User.get(extras.locked_by[0]).username |
|
115 | locked_by = User.get(extras.locked_by[0]).username | |
116 | reason = extras.locked_by[2] |
|
116 | reason = extras.locked_by[2] | |
117 | # this exception is interpreted in git/hg middlewares and based |
|
117 | # this exception is interpreted in git/hg middlewares and based | |
118 | # on that proper return code is server to client |
|
118 | # on that proper return code is server to client | |
119 | _http_ret = HTTPLockedRC( |
|
119 | _http_ret = HTTPLockedRC( | |
120 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
120 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
121 | if str(_http_ret.code).startswith('2'): |
|
121 | if str(_http_ret.code).startswith('2'): | |
122 | # 2xx Codes don't raise exceptions |
|
122 | # 2xx Codes don't raise exceptions | |
123 | output = _http_ret.title |
|
123 | output = _http_ret.title | |
124 | else: |
|
124 | else: | |
125 | raise _http_ret |
|
125 | raise _http_ret | |
126 |
|
126 | |||
127 | hook_response = '' |
|
127 | hook_response = '' | |
128 | if not is_shadow_repo(extras): |
|
128 | if not is_shadow_repo(extras): | |
129 | if extras.commit_ids and extras.check_branch_perms: |
|
129 | if extras.commit_ids and extras.check_branch_perms: | |
130 |
|
130 | |||
131 | auth_user = user.AuthUser() |
|
131 | auth_user = user.AuthUser() | |
132 | repo = Repository.get_by_repo_name(extras.repository) |
|
132 | repo = Repository.get_by_repo_name(extras.repository) | |
133 | affected_branches = [] |
|
133 | affected_branches = [] | |
134 | if repo.repo_type == 'hg': |
|
134 | if repo.repo_type == 'hg': | |
135 | for entry in extras.commit_ids: |
|
135 | for entry in extras.commit_ids: | |
136 | if entry['type'] == 'branch': |
|
136 | if entry['type'] == 'branch': | |
137 | is_forced = bool(entry['multiple_heads']) |
|
137 | is_forced = bool(entry['multiple_heads']) | |
138 | affected_branches.append([entry['name'], is_forced]) |
|
138 | affected_branches.append([entry['name'], is_forced]) | |
139 | elif repo.repo_type == 'git': |
|
139 | elif repo.repo_type == 'git': | |
140 | for entry in extras.commit_ids: |
|
140 | for entry in extras.commit_ids: | |
141 | if entry['type'] == 'heads': |
|
141 | if entry['type'] == 'heads': | |
142 | is_forced = bool(entry['pruned_sha']) |
|
142 | is_forced = bool(entry['pruned_sha']) | |
143 | affected_branches.append([entry['name'], is_forced]) |
|
143 | affected_branches.append([entry['name'], is_forced]) | |
144 |
|
144 | |||
145 | for branch_name, is_forced in affected_branches: |
|
145 | for branch_name, is_forced in affected_branches: | |
146 |
|
146 | |||
147 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
147 | rule, branch_perm = auth_user.get_rule_and_branch_permission( | |
148 | extras.repository, branch_name) |
|
148 | extras.repository, branch_name) | |
149 | if not branch_perm: |
|
149 | if not branch_perm: | |
150 | # no branch permission found for this branch, just keep checking |
|
150 | # no branch permission found for this branch, just keep checking | |
151 | continue |
|
151 | continue | |
152 |
|
152 | |||
153 | if branch_perm == 'branch.push_force': |
|
153 | if branch_perm == 'branch.push_force': | |
154 | continue |
|
154 | continue | |
155 | elif branch_perm == 'branch.push' and is_forced is False: |
|
155 | elif branch_perm == 'branch.push' and is_forced is False: | |
156 | continue |
|
156 | continue | |
157 | elif branch_perm == 'branch.push' and is_forced is True: |
|
157 | elif branch_perm == 'branch.push' and is_forced is True: | |
158 | halt_message = 'Branch `{}` changes rejected by rule {}. ' \ |
|
158 | halt_message = 'Branch `{}` changes rejected by rule {}. ' \ | |
159 | 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule) |
|
159 | 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule) | |
160 | else: |
|
160 | else: | |
161 | halt_message = 'Branch `{}` changes rejected by rule {}.'.format( |
|
161 | halt_message = 'Branch `{}` changes rejected by rule {}.'.format( | |
162 | branch_name, rule) |
|
162 | branch_name, rule) | |
163 |
|
163 | |||
164 | if halt_message: |
|
164 | if halt_message: | |
165 | _http_ret = HTTPBranchProtected(halt_message) |
|
165 | _http_ret = HTTPBranchProtected(halt_message) | |
166 | raise _http_ret |
|
166 | raise _http_ret | |
167 |
|
167 | |||
168 | # Propagate to external components. This is done after checking the |
|
168 | # Propagate to external components. This is done after checking the | |
169 | # lock, for consistent behavior. |
|
169 | # lock, for consistent behavior. | |
170 | hook_response = pre_push_extension( |
|
170 | hook_response = pre_push_extension( | |
171 | repo_store_path=Repository.base_path(), **extras) |
|
171 | repo_store_path=Repository.base_path(), **extras) | |
172 | events.trigger(events.RepoPrePushEvent( |
|
172 | events.trigger(events.RepoPrePushEvent( | |
173 | repo_name=extras.repository, extras=extras)) |
|
173 | repo_name=extras.repository, extras=extras)) | |
174 |
|
174 | |||
175 | return HookResponse(0, output) + hook_response |
|
175 | return HookResponse(0, output) + hook_response | |
176 |
|
176 | |||
177 |
|
177 | |||
178 | def pre_pull(extras): |
|
178 | def pre_pull(extras): | |
179 | """ |
|
179 | """ | |
180 | Hook executed before pulling the code. |
|
180 | Hook executed before pulling the code. | |
181 |
|
181 | |||
182 | It bans pulling when the repository is locked. |
|
182 | It bans pulling when the repository is locked. | |
183 | """ |
|
183 | """ | |
184 |
|
184 | |||
185 | output = '' |
|
185 | output = '' | |
186 | if extras.locked_by[0]: |
|
186 | if extras.locked_by[0]: | |
187 | locked_by = User.get(extras.locked_by[0]).username |
|
187 | locked_by = User.get(extras.locked_by[0]).username | |
188 | reason = extras.locked_by[2] |
|
188 | reason = extras.locked_by[2] | |
189 | # this exception is interpreted in git/hg middlewares and based |
|
189 | # this exception is interpreted in git/hg middlewares and based | |
190 | # on that proper return code is server to client |
|
190 | # on that proper return code is server to client | |
191 | _http_ret = HTTPLockedRC( |
|
191 | _http_ret = HTTPLockedRC( | |
192 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
192 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
193 | if str(_http_ret.code).startswith('2'): |
|
193 | if str(_http_ret.code).startswith('2'): | |
194 | # 2xx Codes don't raise exceptions |
|
194 | # 2xx Codes don't raise exceptions | |
195 | output = _http_ret.title |
|
195 | output = _http_ret.title | |
196 | else: |
|
196 | else: | |
197 | raise _http_ret |
|
197 | raise _http_ret | |
198 |
|
198 | |||
199 | # Propagate to external components. This is done after checking the |
|
199 | # Propagate to external components. This is done after checking the | |
200 | # lock, for consistent behavior. |
|
200 | # lock, for consistent behavior. | |
201 | hook_response = '' |
|
201 | hook_response = '' | |
202 | if not is_shadow_repo(extras): |
|
202 | if not is_shadow_repo(extras): | |
203 | extras.hook_type = extras.hook_type or 'pre_pull' |
|
203 | extras.hook_type = extras.hook_type or 'pre_pull' | |
204 | hook_response = pre_pull_extension( |
|
204 | hook_response = pre_pull_extension( | |
205 | repo_store_path=Repository.base_path(), **extras) |
|
205 | repo_store_path=Repository.base_path(), **extras) | |
206 | events.trigger(events.RepoPrePullEvent( |
|
206 | events.trigger(events.RepoPrePullEvent( | |
207 | repo_name=extras.repository, extras=extras)) |
|
207 | repo_name=extras.repository, extras=extras)) | |
208 |
|
208 | |||
209 | return HookResponse(0, output) + hook_response |
|
209 | return HookResponse(0, output) + hook_response | |
210 |
|
210 | |||
211 |
|
211 | |||
212 | def post_pull(extras): |
|
212 | def post_pull(extras): | |
213 | """Hook executed after client pulls the code.""" |
|
213 | """Hook executed after client pulls the code.""" | |
214 |
|
214 | |||
215 | audit_user = audit_logger.UserWrap( |
|
215 | audit_user = audit_logger.UserWrap( | |
216 | username=extras.username, |
|
216 | username=extras.username, | |
217 | ip_addr=extras.ip) |
|
217 | ip_addr=extras.ip) | |
218 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
218 | repo = audit_logger.RepoWrap(repo_name=extras.repository) | |
219 | audit_logger.store( |
|
219 | audit_logger.store( | |
220 | 'user.pull', action_data={'user_agent': extras.user_agent}, |
|
220 | 'user.pull', action_data={'user_agent': extras.user_agent}, | |
221 | user=audit_user, repo=repo, commit=True) |
|
221 | user=audit_user, repo=repo, commit=True) | |
222 |
|
222 | |||
223 | statsd = StatsdClient.statsd |
|
223 | statsd = StatsdClient.statsd | |
224 | if statsd: |
|
224 | if statsd: | |
225 | statsd.incr('rhodecode_pull') |
|
225 | statsd.incr('rhodecode_pull_total') | |
226 |
|
226 | |||
227 | output = '' |
|
227 | output = '' | |
228 | # make lock is a tri state False, True, None. We only make lock on True |
|
228 | # make lock is a tri state False, True, None. We only make lock on True | |
229 | if extras.make_lock is True and not is_shadow_repo(extras): |
|
229 | if extras.make_lock is True and not is_shadow_repo(extras): | |
230 | user = User.get_by_username(extras.username) |
|
230 | user = User.get_by_username(extras.username) | |
231 | Repository.lock(Repository.get_by_repo_name(extras.repository), |
|
231 | Repository.lock(Repository.get_by_repo_name(extras.repository), | |
232 | user.user_id, |
|
232 | user.user_id, | |
233 | lock_reason=Repository.LOCK_PULL) |
|
233 | lock_reason=Repository.LOCK_PULL) | |
234 | msg = 'Made lock on repo `%s`' % (extras.repository,) |
|
234 | msg = 'Made lock on repo `%s`' % (extras.repository,) | |
235 | output += msg |
|
235 | output += msg | |
236 |
|
236 | |||
237 | if extras.locked_by[0]: |
|
237 | if extras.locked_by[0]: | |
238 | locked_by = User.get(extras.locked_by[0]).username |
|
238 | locked_by = User.get(extras.locked_by[0]).username | |
239 | reason = extras.locked_by[2] |
|
239 | reason = extras.locked_by[2] | |
240 | _http_ret = HTTPLockedRC( |
|
240 | _http_ret = HTTPLockedRC( | |
241 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
241 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
242 | if str(_http_ret.code).startswith('2'): |
|
242 | if str(_http_ret.code).startswith('2'): | |
243 | # 2xx Codes don't raise exceptions |
|
243 | # 2xx Codes don't raise exceptions | |
244 | output += _http_ret.title |
|
244 | output += _http_ret.title | |
245 |
|
245 | |||
246 | # Propagate to external components. |
|
246 | # Propagate to external components. | |
247 | hook_response = '' |
|
247 | hook_response = '' | |
248 | if not is_shadow_repo(extras): |
|
248 | if not is_shadow_repo(extras): | |
249 | extras.hook_type = extras.hook_type or 'post_pull' |
|
249 | extras.hook_type = extras.hook_type or 'post_pull' | |
250 | hook_response = post_pull_extension( |
|
250 | hook_response = post_pull_extension( | |
251 | repo_store_path=Repository.base_path(), **extras) |
|
251 | repo_store_path=Repository.base_path(), **extras) | |
252 | events.trigger(events.RepoPullEvent( |
|
252 | events.trigger(events.RepoPullEvent( | |
253 | repo_name=extras.repository, extras=extras)) |
|
253 | repo_name=extras.repository, extras=extras)) | |
254 |
|
254 | |||
255 | return HookResponse(0, output) + hook_response |
|
255 | return HookResponse(0, output) + hook_response | |
256 |
|
256 | |||
257 |
|
257 | |||
258 | def post_push(extras): |
|
258 | def post_push(extras): | |
259 | """Hook executed after user pushes to the repository.""" |
|
259 | """Hook executed after user pushes to the repository.""" | |
260 | commit_ids = extras.commit_ids |
|
260 | commit_ids = extras.commit_ids | |
261 |
|
261 | |||
262 | # log the push call |
|
262 | # log the push call | |
263 | audit_user = audit_logger.UserWrap( |
|
263 | audit_user = audit_logger.UserWrap( | |
264 | username=extras.username, ip_addr=extras.ip) |
|
264 | username=extras.username, ip_addr=extras.ip) | |
265 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
265 | repo = audit_logger.RepoWrap(repo_name=extras.repository) | |
266 | audit_logger.store( |
|
266 | audit_logger.store( | |
267 | 'user.push', action_data={ |
|
267 | 'user.push', action_data={ | |
268 | 'user_agent': extras.user_agent, |
|
268 | 'user_agent': extras.user_agent, | |
269 | 'commit_ids': commit_ids[:400]}, |
|
269 | 'commit_ids': commit_ids[:400]}, | |
270 | user=audit_user, repo=repo, commit=True) |
|
270 | user=audit_user, repo=repo, commit=True) | |
271 |
|
271 | |||
272 | statsd = StatsdClient.statsd |
|
272 | statsd = StatsdClient.statsd | |
273 | if statsd: |
|
273 | if statsd: | |
274 | statsd.incr('rhodecode_push') |
|
274 | statsd.incr('rhodecode_push_total') | |
275 |
|
275 | |||
276 | # Propagate to external components. |
|
276 | # Propagate to external components. | |
277 | output = '' |
|
277 | output = '' | |
278 | # make lock is a tri state False, True, None. We only release lock on False |
|
278 | # make lock is a tri state False, True, None. We only release lock on False | |
279 | if extras.make_lock is False and not is_shadow_repo(extras): |
|
279 | if extras.make_lock is False and not is_shadow_repo(extras): | |
280 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) |
|
280 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) | |
281 | msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository)) |
|
281 | msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository)) | |
282 | output += msg |
|
282 | output += msg | |
283 |
|
283 | |||
284 | if extras.locked_by[0]: |
|
284 | if extras.locked_by[0]: | |
285 | locked_by = User.get(extras.locked_by[0]).username |
|
285 | locked_by = User.get(extras.locked_by[0]).username | |
286 | reason = extras.locked_by[2] |
|
286 | reason = extras.locked_by[2] | |
287 | _http_ret = HTTPLockedRC( |
|
287 | _http_ret = HTTPLockedRC( | |
288 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
288 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
289 | # TODO: johbo: if not? |
|
289 | # TODO: johbo: if not? | |
290 | if str(_http_ret.code).startswith('2'): |
|
290 | if str(_http_ret.code).startswith('2'): | |
291 | # 2xx Codes don't raise exceptions |
|
291 | # 2xx Codes don't raise exceptions | |
292 | output += _http_ret.title |
|
292 | output += _http_ret.title | |
293 |
|
293 | |||
294 | if extras.new_refs: |
|
294 | if extras.new_refs: | |
295 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( |
|
295 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( | |
296 | safe_str(extras.server_url), safe_str(extras.repository)) |
|
296 | safe_str(extras.server_url), safe_str(extras.repository)) | |
297 |
|
297 | |||
298 | for branch_name in extras.new_refs['branches']: |
|
298 | for branch_name in extras.new_refs['branches']: | |
299 | output += 'RhodeCode: open pull request link: {}\n'.format( |
|
299 | output += 'RhodeCode: open pull request link: {}\n'.format( | |
300 | tmpl.format(ref_type='branch', ref_name=safe_str(branch_name))) |
|
300 | tmpl.format(ref_type='branch', ref_name=safe_str(branch_name))) | |
301 |
|
301 | |||
302 | for book_name in extras.new_refs['bookmarks']: |
|
302 | for book_name in extras.new_refs['bookmarks']: | |
303 | output += 'RhodeCode: open pull request link: {}\n'.format( |
|
303 | output += 'RhodeCode: open pull request link: {}\n'.format( | |
304 | tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name))) |
|
304 | tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name))) | |
305 |
|
305 | |||
306 | hook_response = '' |
|
306 | hook_response = '' | |
307 | if not is_shadow_repo(extras): |
|
307 | if not is_shadow_repo(extras): | |
308 | hook_response = post_push_extension( |
|
308 | hook_response = post_push_extension( | |
309 | repo_store_path=Repository.base_path(), |
|
309 | repo_store_path=Repository.base_path(), | |
310 | **extras) |
|
310 | **extras) | |
311 | events.trigger(events.RepoPushEvent( |
|
311 | events.trigger(events.RepoPushEvent( | |
312 | repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) |
|
312 | repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) | |
313 |
|
313 | |||
314 | output += 'RhodeCode: push completed\n' |
|
314 | output += 'RhodeCode: push completed\n' | |
315 | return HookResponse(0, output) + hook_response |
|
315 | return HookResponse(0, output) + hook_response | |
316 |
|
316 | |||
317 |
|
317 | |||
318 | def _locked_by_explanation(repo_name, user_name, reason): |
|
318 | def _locked_by_explanation(repo_name, user_name, reason): | |
319 | message = ( |
|
319 | message = ( | |
320 | 'Repository `%s` locked by user `%s`. Reason:`%s`' |
|
320 | 'Repository `%s` locked by user `%s`. Reason:`%s`' | |
321 | % (repo_name, user_name, reason)) |
|
321 | % (repo_name, user_name, reason)) | |
322 | return message |
|
322 | return message | |
323 |
|
323 | |||
324 |
|
324 | |||
325 | def check_allowed_create_user(user_dict, created_by, **kwargs): |
|
325 | def check_allowed_create_user(user_dict, created_by, **kwargs): | |
326 | # pre create hooks |
|
326 | # pre create hooks | |
327 | if pre_create_user.is_active(): |
|
327 | if pre_create_user.is_active(): | |
328 | hook_result = pre_create_user(created_by=created_by, **user_dict) |
|
328 | hook_result = pre_create_user(created_by=created_by, **user_dict) | |
329 | allowed = hook_result.status == 0 |
|
329 | allowed = hook_result.status == 0 | |
330 | if not allowed: |
|
330 | if not allowed: | |
331 | reason = hook_result.output |
|
331 | reason = hook_result.output | |
332 | raise UserCreationError(reason) |
|
332 | raise UserCreationError(reason) | |
333 |
|
333 | |||
334 |
|
334 | |||
335 | class ExtensionCallback(object): |
|
335 | class ExtensionCallback(object): | |
336 | """ |
|
336 | """ | |
337 | Forwards a given call to rcextensions, sanitizes keyword arguments. |
|
337 | Forwards a given call to rcextensions, sanitizes keyword arguments. | |
338 |
|
338 | |||
339 | Does check if there is an extension active for that hook. If it is |
|
339 | Does check if there is an extension active for that hook. If it is | |
340 | there, it will forward all `kwargs_keys` keyword arguments to the |
|
340 | there, it will forward all `kwargs_keys` keyword arguments to the | |
341 | extension callback. |
|
341 | extension callback. | |
342 | """ |
|
342 | """ | |
343 |
|
343 | |||
344 | def __init__(self, hook_name, kwargs_keys): |
|
344 | def __init__(self, hook_name, kwargs_keys): | |
345 | self._hook_name = hook_name |
|
345 | self._hook_name = hook_name | |
346 | self._kwargs_keys = set(kwargs_keys) |
|
346 | self._kwargs_keys = set(kwargs_keys) | |
347 |
|
347 | |||
348 | def __call__(self, *args, **kwargs): |
|
348 | def __call__(self, *args, **kwargs): | |
349 | log.debug('Calling extension callback for `%s`', self._hook_name) |
|
349 | log.debug('Calling extension callback for `%s`', self._hook_name) | |
350 | callback = self._get_callback() |
|
350 | callback = self._get_callback() | |
351 | if not callback: |
|
351 | if not callback: | |
352 | log.debug('extension callback `%s` not found, skipping...', self._hook_name) |
|
352 | log.debug('extension callback `%s` not found, skipping...', self._hook_name) | |
353 | return |
|
353 | return | |
354 |
|
354 | |||
355 | kwargs_to_pass = {} |
|
355 | kwargs_to_pass = {} | |
356 | for key in self._kwargs_keys: |
|
356 | for key in self._kwargs_keys: | |
357 | try: |
|
357 | try: | |
358 | kwargs_to_pass[key] = kwargs[key] |
|
358 | kwargs_to_pass[key] = kwargs[key] | |
359 | except KeyError: |
|
359 | except KeyError: | |
360 | log.error('Failed to fetch %s key from given kwargs. ' |
|
360 | log.error('Failed to fetch %s key from given kwargs. ' | |
361 | 'Expected keys: %s', key, self._kwargs_keys) |
|
361 | 'Expected keys: %s', key, self._kwargs_keys) | |
362 | raise |
|
362 | raise | |
363 |
|
363 | |||
364 | # backward compat for removed api_key for old hooks. This was it works |
|
364 | # backward compat for removed api_key for old hooks. This was it works | |
365 | # with older rcextensions that require api_key present |
|
365 | # with older rcextensions that require api_key present | |
366 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: |
|
366 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: | |
367 | kwargs_to_pass['api_key'] = '_DEPRECATED_' |
|
367 | kwargs_to_pass['api_key'] = '_DEPRECATED_' | |
368 | return callback(**kwargs_to_pass) |
|
368 | return callback(**kwargs_to_pass) | |
369 |
|
369 | |||
370 | def is_active(self): |
|
370 | def is_active(self): | |
371 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) |
|
371 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) | |
372 |
|
372 | |||
373 | def _get_callback(self): |
|
373 | def _get_callback(self): | |
374 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) |
|
374 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) | |
375 |
|
375 | |||
376 |
|
376 | |||
377 | pre_pull_extension = ExtensionCallback( |
|
377 | pre_pull_extension = ExtensionCallback( | |
378 | hook_name='PRE_PULL_HOOK', |
|
378 | hook_name='PRE_PULL_HOOK', | |
379 | kwargs_keys=( |
|
379 | kwargs_keys=( | |
380 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
380 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
381 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) |
|
381 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) | |
382 |
|
382 | |||
383 |
|
383 | |||
384 | post_pull_extension = ExtensionCallback( |
|
384 | post_pull_extension = ExtensionCallback( | |
385 | hook_name='PULL_HOOK', |
|
385 | hook_name='PULL_HOOK', | |
386 | kwargs_keys=( |
|
386 | kwargs_keys=( | |
387 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
387 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
388 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) |
|
388 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) | |
389 |
|
389 | |||
390 |
|
390 | |||
391 | pre_push_extension = ExtensionCallback( |
|
391 | pre_push_extension = ExtensionCallback( | |
392 | hook_name='PRE_PUSH_HOOK', |
|
392 | hook_name='PRE_PUSH_HOOK', | |
393 | kwargs_keys=( |
|
393 | kwargs_keys=( | |
394 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
394 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
395 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) |
|
395 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) | |
396 |
|
396 | |||
397 |
|
397 | |||
398 | post_push_extension = ExtensionCallback( |
|
398 | post_push_extension = ExtensionCallback( | |
399 | hook_name='PUSH_HOOK', |
|
399 | hook_name='PUSH_HOOK', | |
400 | kwargs_keys=( |
|
400 | kwargs_keys=( | |
401 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
401 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
402 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) |
|
402 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) | |
403 |
|
403 | |||
404 |
|
404 | |||
405 | pre_create_user = ExtensionCallback( |
|
405 | pre_create_user = ExtensionCallback( | |
406 | hook_name='PRE_CREATE_USER_HOOK', |
|
406 | hook_name='PRE_CREATE_USER_HOOK', | |
407 | kwargs_keys=( |
|
407 | kwargs_keys=( | |
408 | 'username', 'password', 'email', 'firstname', 'lastname', 'active', |
|
408 | 'username', 'password', 'email', 'firstname', 'lastname', 'active', | |
409 | 'admin', 'created_by')) |
|
409 | 'admin', 'created_by')) | |
410 |
|
410 | |||
411 |
|
411 | |||
412 | create_pull_request = ExtensionCallback( |
|
412 | create_pull_request = ExtensionCallback( | |
413 | hook_name='CREATE_PULL_REQUEST', |
|
413 | hook_name='CREATE_PULL_REQUEST', | |
414 | kwargs_keys=( |
|
414 | kwargs_keys=( | |
415 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
415 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
416 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
416 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
417 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
417 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
418 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
418 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
419 |
|
419 | |||
420 |
|
420 | |||
421 | merge_pull_request = ExtensionCallback( |
|
421 | merge_pull_request = ExtensionCallback( | |
422 | hook_name='MERGE_PULL_REQUEST', |
|
422 | hook_name='MERGE_PULL_REQUEST', | |
423 | kwargs_keys=( |
|
423 | kwargs_keys=( | |
424 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
424 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
425 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
425 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
426 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
426 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
427 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
427 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
428 |
|
428 | |||
429 |
|
429 | |||
430 | close_pull_request = ExtensionCallback( |
|
430 | close_pull_request = ExtensionCallback( | |
431 | hook_name='CLOSE_PULL_REQUEST', |
|
431 | hook_name='CLOSE_PULL_REQUEST', | |
432 | kwargs_keys=( |
|
432 | kwargs_keys=( | |
433 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
433 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
434 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
434 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
435 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
435 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
436 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
436 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
437 |
|
437 | |||
438 |
|
438 | |||
439 | review_pull_request = ExtensionCallback( |
|
439 | review_pull_request = ExtensionCallback( | |
440 | hook_name='REVIEW_PULL_REQUEST', |
|
440 | hook_name='REVIEW_PULL_REQUEST', | |
441 | kwargs_keys=( |
|
441 | kwargs_keys=( | |
442 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
442 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
443 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
443 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
444 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
444 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
445 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
445 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
446 |
|
446 | |||
447 |
|
447 | |||
448 | comment_pull_request = ExtensionCallback( |
|
448 | comment_pull_request = ExtensionCallback( | |
449 | hook_name='COMMENT_PULL_REQUEST', |
|
449 | hook_name='COMMENT_PULL_REQUEST', | |
450 | kwargs_keys=( |
|
450 | kwargs_keys=( | |
451 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
451 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
452 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
452 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
453 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
453 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
454 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
454 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
455 |
|
455 | |||
456 |
|
456 | |||
457 | comment_edit_pull_request = ExtensionCallback( |
|
457 | comment_edit_pull_request = ExtensionCallback( | |
458 | hook_name='COMMENT_EDIT_PULL_REQUEST', |
|
458 | hook_name='COMMENT_EDIT_PULL_REQUEST', | |
459 | kwargs_keys=( |
|
459 | kwargs_keys=( | |
460 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
460 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
461 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
461 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
462 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
462 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
463 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
463 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
464 |
|
464 | |||
465 |
|
465 | |||
466 | update_pull_request = ExtensionCallback( |
|
466 | update_pull_request = ExtensionCallback( | |
467 | hook_name='UPDATE_PULL_REQUEST', |
|
467 | hook_name='UPDATE_PULL_REQUEST', | |
468 | kwargs_keys=( |
|
468 | kwargs_keys=( | |
469 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
469 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
470 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
470 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
471 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
471 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
472 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
472 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
473 |
|
473 | |||
474 |
|
474 | |||
475 | create_user = ExtensionCallback( |
|
475 | create_user = ExtensionCallback( | |
476 | hook_name='CREATE_USER_HOOK', |
|
476 | hook_name='CREATE_USER_HOOK', | |
477 | kwargs_keys=( |
|
477 | kwargs_keys=( | |
478 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
478 | 'username', 'full_name_or_username', 'full_contact', 'user_id', | |
479 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
479 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', | |
480 | 'ip_addresses', 'extern_type', 'extern_name', |
|
480 | 'ip_addresses', 'extern_type', 'extern_name', | |
481 | 'email', 'api_keys', 'last_login', |
|
481 | 'email', 'api_keys', 'last_login', | |
482 | 'full_name', 'active', 'password', 'emails', |
|
482 | 'full_name', 'active', 'password', 'emails', | |
483 | 'inherit_default_permissions', 'created_by', 'created_on')) |
|
483 | 'inherit_default_permissions', 'created_by', 'created_on')) | |
484 |
|
484 | |||
485 |
|
485 | |||
486 | delete_user = ExtensionCallback( |
|
486 | delete_user = ExtensionCallback( | |
487 | hook_name='DELETE_USER_HOOK', |
|
487 | hook_name='DELETE_USER_HOOK', | |
488 | kwargs_keys=( |
|
488 | kwargs_keys=( | |
489 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
489 | 'username', 'full_name_or_username', 'full_contact', 'user_id', | |
490 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
490 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', | |
491 | 'ip_addresses', |
|
491 | 'ip_addresses', | |
492 | 'email', 'last_login', |
|
492 | 'email', 'last_login', | |
493 | 'full_name', 'active', 'password', 'emails', |
|
493 | 'full_name', 'active', 'password', 'emails', | |
494 | 'inherit_default_permissions', 'deleted_by')) |
|
494 | 'inherit_default_permissions', 'deleted_by')) | |
495 |
|
495 | |||
496 |
|
496 | |||
497 | create_repository = ExtensionCallback( |
|
497 | create_repository = ExtensionCallback( | |
498 | hook_name='CREATE_REPO_HOOK', |
|
498 | hook_name='CREATE_REPO_HOOK', | |
499 | kwargs_keys=( |
|
499 | kwargs_keys=( | |
500 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
500 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
501 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
501 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
502 | 'clone_uri', 'fork_id', 'group_id', 'created_by')) |
|
502 | 'clone_uri', 'fork_id', 'group_id', 'created_by')) | |
503 |
|
503 | |||
504 |
|
504 | |||
505 | delete_repository = ExtensionCallback( |
|
505 | delete_repository = ExtensionCallback( | |
506 | hook_name='DELETE_REPO_HOOK', |
|
506 | hook_name='DELETE_REPO_HOOK', | |
507 | kwargs_keys=( |
|
507 | kwargs_keys=( | |
508 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
508 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
509 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
509 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
510 | 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) |
|
510 | 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) | |
511 |
|
511 | |||
512 |
|
512 | |||
513 | comment_commit_repository = ExtensionCallback( |
|
513 | comment_commit_repository = ExtensionCallback( | |
514 | hook_name='COMMENT_COMMIT_REPO_HOOK', |
|
514 | hook_name='COMMENT_COMMIT_REPO_HOOK', | |
515 | kwargs_keys=( |
|
515 | kwargs_keys=( | |
516 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
516 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
517 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
517 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
518 | 'clone_uri', 'fork_id', 'group_id', |
|
518 | 'clone_uri', 'fork_id', 'group_id', | |
519 | 'repository', 'created_by', 'comment', 'commit')) |
|
519 | 'repository', 'created_by', 'comment', 'commit')) | |
520 |
|
520 | |||
521 | comment_edit_commit_repository = ExtensionCallback( |
|
521 | comment_edit_commit_repository = ExtensionCallback( | |
522 | hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK', |
|
522 | hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK', | |
523 | kwargs_keys=( |
|
523 | kwargs_keys=( | |
524 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
524 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
525 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
525 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
526 | 'clone_uri', 'fork_id', 'group_id', |
|
526 | 'clone_uri', 'fork_id', 'group_id', | |
527 | 'repository', 'created_by', 'comment', 'commit')) |
|
527 | 'repository', 'created_by', 'comment', 'commit')) | |
528 |
|
528 | |||
529 |
|
529 | |||
530 | create_repository_group = ExtensionCallback( |
|
530 | create_repository_group = ExtensionCallback( | |
531 | hook_name='CREATE_REPO_GROUP_HOOK', |
|
531 | hook_name='CREATE_REPO_GROUP_HOOK', | |
532 | kwargs_keys=( |
|
532 | kwargs_keys=( | |
533 | 'group_name', 'group_parent_id', 'group_description', |
|
533 | 'group_name', 'group_parent_id', 'group_description', | |
534 | 'group_id', 'user_id', 'created_by', 'created_on', |
|
534 | 'group_id', 'user_id', 'created_by', 'created_on', | |
535 | 'enable_locking')) |
|
535 | 'enable_locking')) |
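Note: each module-level hook defined in this file is an `ExtensionCallback`. It only fires when a matching attribute exists on `rhodecode.EXTENSIONS`, and it forwards exactly the whitelisted `kwargs_keys` to that callback (a missing whitelisted key is logged and re-raised as `KeyError`). Below is a minimal, self-contained sketch of that filtering behaviour; `DummyExtensions`, `demo_push_hook` and the reduced key set are invented stand-ins for rcextensions, not part of RhodeCode.

# Illustrative sketch of the kwargs filtering done by ExtensionCallback.
class DummyExtensions(object):
    pass


def demo_push_hook(username, repository, commit_ids):
    # An rcextensions-style callback sees only the whitelisted keys.
    return 'push by {} to {} ({} commits)'.format(
        username, repository, len(commit_ids))


extensions = DummyExtensions()
extensions.PUSH_HOOK = demo_push_hook

kwargs_keys = {'username', 'repository', 'commit_ids'}  # simplified whitelist
incoming = {
    'username': 'admin',
    'repository': 'project/repo',
    'commit_ids': ['abc123', 'def456'],
    'server_url': 'https://code.example.com',  # dropped: not whitelisted here
}

callback = getattr(extensions, 'PUSH_HOOK', None)
if callback:
    filtered = {key: incoming[key] for key in kwargs_keys}
    print(callback(**filtered))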
@@ -1,90 +1,89 b''
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import time
 import logging
 
 import rhodecode
 from rhodecode.lib.auth import AuthUser
 from rhodecode.lib.base import get_ip_addr, get_access_path, get_user_agent
 from rhodecode.lib.utils2 import safe_str, get_current_rhodecode_user
 
 
 log = logging.getLogger(__name__)
 
 
 class RequestWrapperTween(object):
     def __init__(self, handler, registry):
         self.handler = handler
         self.registry = registry
 
         # one-time configuration code goes here
 
     def _get_user_info(self, request):
         user = get_current_rhodecode_user(request)
         if not user:
             user = AuthUser.repr_user(ip=get_ip_addr(request.environ))
         return user
 
     def __call__(self, request):
         start = time.time()
         log.debug('Starting request time measurement')
         try:
             response = self.handler(request)
         finally:
             count = request.request_count()
             _ver_ = rhodecode.__version__
             _path = safe_str(get_access_path(request.environ))
             _auth_user = self._get_user_info(request)
-
+            user_id = getattr(_auth_user, 'user_id', _auth_user)
             total = time.time() - start
             log.info(
                 'Req[%4s] %s %s Request to %s time: %.4fs [%s], RhodeCode %s',
                 count, _auth_user, request.environ.get('REQUEST_METHOD'),
                 _path, total, get_user_agent(request.environ), _ver_
             )
 
             statsd = request.registry.statsd
             if statsd:
                 resp_code = response.status_code
-                user_id = getattr(_auth_user, 'user_id', _auth_user)
                 elapsed_time_ms = 1000.0 * total
                 statsd.timing(
                     'rhodecode_req_timing', elapsed_time_ms,
                     tags=[
-
+                        "view_name:{}".format(request.matched_route.name),
                         #"user:{}".format(user_id),
                         "code:{}".format(resp_code)
                     ]
                 )
                 statsd.incr(
-                    'rhodecode_req_count', tags=[
-
+                    'rhodecode_req_count_total', tags=[
+                        "view_name:{}".format(request.matched_route.name),
                         #"user:{}".format(user_id),
                         "code:{}".format(resp_code)
                     ])
 
         return response
 
 
 def includeme(config):
     config.add_tween(
         'rhodecode.lib.middleware.request_wrapper.RequestWrapperTween',
     )
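Note: the metric renames in this changeset (`rhodecode_pull` to `rhodecode_pull_total`, `rhodecode_push` to `rhodecode_push_total`, `rhodecode_req_count` to `rhodecode_req_count_total`) add the `_total` suffix commonly used for monotonically increasing counters, so dashboards or alerts keyed on the old names will likely need the same rename. The sketch below shows the calls the hooks and the request tween make after this change; `RecordingStatsd` is an invented stand-in for illustration, not RhodeCode's `StatsdClient`.

# Minimal stub illustrating the metric names emitted after this change.
class RecordingStatsd(object):
    def __init__(self):
        self.recorded = []

    def incr(self, name, tags=None):
        self.recorded.append((name, tags or []))

    def timing(self, name, value, tags=None):
        self.recorded.append((name, tags or []))


statsd = RecordingStatsd()

# What post_pull() and post_push() now send:
statsd.incr('rhodecode_pull_total')
statsd.incr('rhodecode_push_total')

# What the request tween now sends per request:
statsd.timing('rhodecode_req_timing', 12.5,
              tags=["view_name:home", "code:200"])
statsd.incr('rhodecode_req_count_total',
            tags=["view_name:home", "code:200"])

print(statsd.recorded)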
General Comments 0