@@ -1,568 +1,576 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import itertools |
|
22 | 22 | import logging |
|
23 | 23 | import sys |
|
24 | 24 | import types |
|
25 | 25 | import fnmatch |
|
26 | 26 | |
|
27 | 27 | import decorator |
|
28 | 28 | import venusian |
|
29 | 29 | from collections import OrderedDict |
|
30 | 30 | |
|
31 | 31 | from pyramid.exceptions import ConfigurationError |
|
32 | 32 | from pyramid.renderers import render |
|
33 | 33 | from pyramid.response import Response |
|
34 | 34 | from pyramid.httpexceptions import HTTPNotFound |
|
35 | 35 | |
|
36 | 36 | from rhodecode.api.exc import ( |
|
37 | 37 | JSONRPCBaseError, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
38 | 38 | from rhodecode.apps._base import TemplateArgs |
|
39 | 39 | from rhodecode.lib.auth import AuthUser |
|
40 | 40 | from rhodecode.lib.base import get_ip_addr, attach_context_attributes |
|
41 | 41 | from rhodecode.lib.exc_tracking import store_exception |
|
42 | 42 | from rhodecode.lib.ext_json import json |
|
43 | 43 | from rhodecode.lib.utils2 import safe_str |
|
44 | 44 | from rhodecode.lib.plugins.utils import get_plugin_settings |
|
45 | 45 | from rhodecode.model.db import User, UserApiKeys |
|
46 | 46 | |
|
47 | 47 | log = logging.getLogger(__name__) |
|
48 | 48 | |
|
49 | 49 | DEFAULT_RENDERER = 'jsonrpc_renderer' |
|
50 | 50 | DEFAULT_URL = '/_admin/apiv2' |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def find_methods(jsonrpc_methods, pattern): |
|
54 | 54 | matches = OrderedDict() |
|
55 | 55 | if not isinstance(pattern, (list, tuple)): |
|
56 | 56 | pattern = [pattern] |
|
57 | 57 | |
|
58 | 58 | for single_pattern in pattern: |
|
59 | 59 | for method_name, method in jsonrpc_methods.items(): |
|
60 | 60 | if fnmatch.fnmatch(method_name, single_pattern): |
|
61 | 61 | matches[method_name] = method |
|
62 | 62 | return matches |
|
63 | 63 | |
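Annotation: a small sketch of how find_methods() filters the registered API methods with fnmatch-style glob patterns. The method names below are made-up placeholders, not the real registry contents.

    # hypothetical registry contents, for illustration only
    methods = OrderedDict([
        ('get_repo', None), ('get_repo_settings', None), ('create_user', None)])
    find_methods(methods, 'get_repo*').keys()             # ['get_repo', 'get_repo_settings']
    find_methods(methods, ['*user*', 'get_repo']).keys()  # ['create_user', 'get_repo']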
|
64 | 64 | |
|
65 | 65 | class ExtJsonRenderer(object): |
|
66 | 66 | """ |
|
67 | 67 | Custom renderer that makes use of our ext_json lib |
|
68 | 68 | |
|
69 | 69 | """ |
|
70 | 70 | |
|
71 | 71 | def __init__(self, serializer=json.dumps, **kw): |
|
72 | 72 | """ Any keyword arguments will be passed to the ``serializer`` |
|
73 | 73 | function.""" |
|
74 | 74 | self.serializer = serializer |
|
75 | 75 | self.kw = kw |
|
76 | 76 | |
|
77 | 77 | def __call__(self, info): |
|
78 | 78 | """ Returns a plain JSON-encoded string with content-type |
|
79 | 79 | ``application/json``. The content-type may be overridden by |
|
80 | 80 | setting ``request.response.content_type``.""" |
|
81 | 81 | |
|
82 | 82 | def _render(value, system): |
|
83 | 83 | request = system.get('request') |
|
84 | 84 | if request is not None: |
|
85 | 85 | response = request.response |
|
86 | 86 | ct = response.content_type |
|
87 | 87 | if ct == response.default_content_type: |
|
88 | 88 | response.content_type = 'application/json' |
|
89 | 89 | |
|
90 | 90 | return self.serializer(value, **self.kw) |
|
91 | 91 | |
|
92 | 92 | return _render |
|
93 | 93 | |
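Annotation: a rough sketch of how Pyramid exercises this renderer factory. The `info` argument and the system dict are stubbed here purely for illustration; in a real call Pyramid supplies both.

    renderer = ExtJsonRenderer(serializer=json.dumps, indent=4)
    render_fn = renderer(info=None)   # factory step normally done by Pyramid
    body = render_fn({'id': 1, 'result': 'pong', 'error': None}, {'request': None})
    # body is the JSON-encoded string; content_type is only adjusted when a real request is present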
|
94 | 94 | |
|
95 | 95 | def jsonrpc_response(request, result): |
|
96 | 96 | rpc_id = getattr(request, 'rpc_id', None) |
|
97 | 97 | response = request.response |
|
98 | 98 | |
|
99 | 99 | # store content_type before render is called |
|
100 | 100 | ct = response.content_type |
|
101 | 101 | |
|
102 | 102 | ret_value = '' |
|
103 | 103 | if rpc_id: |
|
104 | 104 | ret_value = { |
|
105 | 105 | 'id': rpc_id, |
|
106 | 106 | 'result': result, |
|
107 | 107 | 'error': None, |
|
108 | 108 | } |
|
109 | 109 | |
|
110 | 110 | # fetch deprecation warnings and store them inside the results |
|
111 | 111 | deprecation = getattr(request, 'rpc_deprecation', None) |
|
112 | 112 | if deprecation: |
|
113 | 113 | ret_value['DEPRECATION_WARNING'] = deprecation |
|
114 | 114 | |
|
115 | 115 | raw_body = render(DEFAULT_RENDERER, ret_value, request=request) |
|
116 | 116 | response.body = safe_str(raw_body, response.charset) |
|
117 | 117 | |
|
118 | 118 | if ct == response.default_content_type: |
|
119 | 119 | response.content_type = 'application/json' |
|
120 | 120 | |
|
121 | 121 | return response |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def jsonrpc_error(request, message, retid=None, code=None, headers=None): |
|
125 | 125 | """ |
|
126 | 126 | Generate a Response object with a JSON-RPC error body |
|
127 | 127 | |
|
128 | 128 | :param code: |
|
129 | 129 | :param retid: |
|
130 | 130 | :param message: |
|
131 | 131 | """ |
|
132 | 132 | err_dict = {'id': retid, 'result': None, 'error': message} |
|
133 | 133 | body = render(DEFAULT_RENDERER, err_dict, request=request).encode('utf-8') |
|
134 | 134 | |
|
135 | 135 | return Response( |
|
136 | 136 | body=body, |
|
137 | 137 | status=code, |
|
138 | 138 | content_type='application/json', |
|
139 | 139 | headerlist=headers |
|
140 | 140 | ) |
|
141 | 141 | |
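Annotation: taken together, jsonrpc_response() and jsonrpc_error() produce the two envelope shapes this API speaks; roughly:

    # success envelope (jsonrpc_response), optionally extended with a DEPRECATION_WARNING key:
    #   {"id": <rpc_id>, "result": <payload>, "error": null}
    # failure envelope (jsonrpc_error):
    #   {"id": <rpc_id>, "result": null, "error": "<message>"}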
|
142 | 142 | |
|
143 | 143 | def exception_view(exc, request): |
|
144 | 144 | rpc_id = getattr(request, 'rpc_id', None) |
|
145 | 145 | |
|
146 | 146 | if isinstance(exc, JSONRPCError): |
|
147 | 147 | fault_message = safe_str(exc.message) |
|
148 | 148 | log.debug('json-rpc error rpc_id:%s "%s"', rpc_id, fault_message) |
|
149 | 149 | elif isinstance(exc, JSONRPCValidationError): |
|
150 | 150 | colander_exc = exc.colander_exception |
|
151 | 151 | # TODO(marcink): think maybe of nicer way to serialize errors ? |
|
152 | 152 | fault_message = colander_exc.asdict() |
|
153 | 153 | log.debug('json-rpc colander error rpc_id:%s "%s"', rpc_id, fault_message) |
|
154 | 154 | elif isinstance(exc, JSONRPCForbidden): |
|
155 | 155 | fault_message = 'Access was denied to this resource.' |
|
156 | 156 | log.warning('json-rpc forbidden call rpc_id:%s "%s"', rpc_id, fault_message) |
|
157 | 157 | elif isinstance(exc, HTTPNotFound): |
|
158 | 158 | method = request.rpc_method |
|
159 | 159 | log.debug('json-rpc method `%s` not found in list of ' |
|
160 | 160 | 'api calls: %s, rpc_id:%s', |
|
161 | 161 | method, request.registry.jsonrpc_methods.keys(), rpc_id) |
|
162 | 162 | |
|
163 | 163 | similar = 'none' |
|
164 | 164 | try: |
|
165 | 165 | similar_paterns = ['*{}*'.format(x) for x in method.split('_')] |
|
166 | 166 | similar_found = find_methods( |
|
167 | 167 | request.registry.jsonrpc_methods, similar_paterns) |
|
168 | 168 | similar = ', '.join(similar_found.keys()) or similar |
|
169 | 169 | except Exception: |
|
170 | 170 | # make the whole above block safe |
|
171 | 171 | pass |
|
172 | 172 | |
|
173 | 173 | fault_message = "No such method: {}. Similar methods: {}".format( |
|
174 | 174 | method, similar) |
|
175 | 175 | else: |
|
176 | 176 | fault_message = 'undefined error' |
|
177 | 177 | exc_info = exc.exc_info() |
|
178 | 178 | store_exception(id(exc_info), exc_info, prefix='rhodecode-api') |
|
179 | 179 | |
|
180 | 180 | statsd = request.registry.statsd |
|
181 | 181 | if statsd: |
|
182 | statsd.incr('rhodecode_exception', tags=["api"]) | |
|
182 | statsd.incr('rhodecode_exception_total', tags=["exc_source:api"]) | |
|
183 | 183 | |
|
184 | 184 | return jsonrpc_error(request, fault_message, rpc_id) |
|
185 | 185 | |
|
186 | 186 | |
|
187 | 187 | def request_view(request): |
|
188 | 188 | """ |
|
189 | 189 | Main request handling method. It handles all logic to call a specific |
|
190 | 190 | exposed method |
|
191 | 191 | """ |
|
192 | 192 | # cython compatible inspect |
|
193 | 193 | from rhodecode.config.patches import inspect_getargspec |
|
194 | 194 | inspect = inspect_getargspec() |
|
195 | 195 | |
|
196 | 196 | # check if we can find this session using api_key, get_by_auth_token |
|
197 | 197 | # search not expired tokens only |
|
198 | 198 | try: |
|
199 | 199 | api_user = User.get_by_auth_token(request.rpc_api_key) |
|
200 | 200 | |
|
201 | 201 | if api_user is None: |
|
202 | 202 | return jsonrpc_error( |
|
203 | 203 | request, retid=request.rpc_id, message='Invalid API KEY') |
|
204 | 204 | |
|
205 | 205 | if not api_user.active: |
|
206 | 206 | return jsonrpc_error( |
|
207 | 207 | request, retid=request.rpc_id, |
|
208 | 208 | message='Request from this user not allowed') |
|
209 | 209 | |
|
210 | 210 | # check if we are allowed to use this IP |
|
211 | 211 | auth_u = AuthUser( |
|
212 | 212 | api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr) |
|
213 | 213 | if not auth_u.ip_allowed: |
|
214 | 214 | return jsonrpc_error( |
|
215 | 215 | request, retid=request.rpc_id, |
|
216 | 216 | message='Request from IP:%s not allowed' % ( |
|
217 | 217 | request.rpc_ip_addr,)) |
|
218 | 218 | else: |
|
219 | 219 | log.info('Access for IP:%s allowed', request.rpc_ip_addr) |
|
220 | 220 | |
|
221 | 221 | # register our auth-user |
|
222 | 222 | request.rpc_user = auth_u |
|
223 | 223 | request.environ['rc_auth_user_id'] = auth_u.user_id |
|
224 | 224 | |
|
225 | 225 | # now check if token is valid for API |
|
226 | 226 | auth_token = request.rpc_api_key |
|
227 | 227 | token_match = api_user.authenticate_by_token( |
|
228 | 228 | auth_token, roles=[UserApiKeys.ROLE_API]) |
|
229 | 229 | invalid_token = not token_match |
|
230 | 230 | |
|
231 | 231 | log.debug('Checking if API KEY is valid with proper role') |
|
232 | 232 | if invalid_token: |
|
233 | 233 | return jsonrpc_error( |
|
234 | 234 | request, retid=request.rpc_id, |
|
235 | 235 | message='API KEY invalid or, has bad role for an API call') |
|
236 | 236 | |
|
237 | 237 | except Exception: |
|
238 | 238 | log.exception('Error on API AUTH') |
|
239 | 239 | return jsonrpc_error( |
|
240 | 240 | request, retid=request.rpc_id, message='Invalid API KEY') |
|
241 | 241 | |
|
242 | 242 | method = request.rpc_method |
|
243 | 243 | func = request.registry.jsonrpc_methods[method] |
|
244 | 244 | |
|
245 | 245 | # now that we have a method, add request._req_params to |
|
246 | 246 | # the keyword arguments and dispatch control to the exposed function |
|
247 | 247 | argspec = inspect.getargspec(func) |
|
248 | 248 | arglist = argspec[0] |
|
249 | 249 | defaults = map(type, argspec[3] or []) |
|
250 | 250 | default_empty = types.NotImplementedType |
|
251 | 251 | |
|
252 | 252 | # kw arguments required by this method |
|
253 | 253 | func_kwargs = dict(itertools.izip_longest( |
|
254 | 254 | reversed(arglist), reversed(defaults), fillvalue=default_empty)) |
|
255 | 255 | |
|
256 | 256 | # This attribute will need to be first param of a method that uses |
|
257 | 257 | # api_key, which is translated to instance of user at that name |
|
258 | 258 | user_var = 'apiuser' |
|
259 | 259 | request_var = 'request' |
|
260 | 260 | |
|
261 | 261 | for arg in [user_var, request_var]: |
|
262 | 262 | if arg not in arglist: |
|
263 | 263 | return jsonrpc_error( |
|
264 | 264 | request, |
|
265 | 265 | retid=request.rpc_id, |
|
266 | 266 | message='This method [%s] does not support ' |
|
267 | 267 | 'required parameter `%s`' % (func.__name__, arg)) |
|
268 | 268 | |
|
269 | 269 | # get our arglist and check if we provided them as args |
|
270 | 270 | for arg, default in func_kwargs.items(): |
|
271 | 271 | if arg in [user_var, request_var]: |
|
272 | 272 | # user_var and request_var are pre-hardcoded parameters and we |
|
273 | 273 | # don't need to do any translation |
|
274 | 274 | continue |
|
275 | 275 | |
|
276 | 276 | # skip the required param check if its default value is |
|
277 | 277 | # NotImplementedType (default_empty) |
|
278 | 278 | if default == default_empty and arg not in request.rpc_params: |
|
279 | 279 | return jsonrpc_error( |
|
280 | 280 | request, |
|
281 | 281 | retid=request.rpc_id, |
|
282 | 282 | message=('Missing non optional `%s` arg in JSON DATA' % arg) |
|
283 | 283 | ) |
|
284 | 284 | |
|
285 | 285 | # sanitize extra passed arguments |
|
286 | 286 | for k in request.rpc_params.keys()[:]: |
|
287 | 287 | if k not in func_kwargs: |
|
288 | 288 | del request.rpc_params[k] |
|
289 | 289 | |
|
290 | 290 | call_params = request.rpc_params |
|
291 | 291 | call_params.update({ |
|
292 | 292 | 'request': request, |
|
293 | 293 | 'apiuser': auth_u |
|
294 | 294 | }) |
|
295 | 295 | |
|
296 | 296 | # register some common functions for usage |
|
297 | 297 | attach_context_attributes(TemplateArgs(), request, request.rpc_user.user_id) |
|
298 | 298 | |
|
299 | statsd = request.registry.statsd | |
|
300 | ||
|
299 | 301 | try: |
|
300 | 302 | ret_value = func(**call_params) |
|
301 | return jsonrpc_response(request, ret_value) | |
|
303 | resp = jsonrpc_response(request, ret_value) | |
|
304 | if statsd: | |
|
305 | statsd.incr('rhodecode_api_call_success_total') | |
|
306 | return resp | |
|
302 | 307 | except JSONRPCBaseError: |
|
303 | 308 | raise |
|
304 | 309 | except Exception: |
|
305 | 310 | log.exception('Unhandled exception occurred on api call: %s', func) |
|
306 | 311 | exc_info = sys.exc_info() |
|
307 | 312 | exc_id, exc_type_name = store_exception( |
|
308 | 313 | id(exc_info), exc_info, prefix='rhodecode-api') |
|
309 | 314 | error_headers = [('RhodeCode-Exception-Id', str(exc_id)), |
|
310 | 315 | ('RhodeCode-Exception-Type', str(exc_type_name))] |
|
311 | return jsonrpc_error( | |
|
316 | err_resp = jsonrpc_error( | |
|
312 | 317 | request, retid=request.rpc_id, message='Internal server error', |
|
313 | 318 | headers=error_headers) |
|
319 | if statsd: | |
|
320 | statsd.incr('rhodecode_api_call_fail_total') | |
|
321 | return err_resp | |
|
314 | 322 | |
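Annotation: a small illustration of how the getargspec/izip_longest pairing above separates required from optional API arguments. The method signature is hypothetical; the snippet is Python 2, matching this module.

    import inspect, itertools, types

    def example_call(request, apiuser, repoid, cache=True):   # hypothetical exposed method
        pass

    argspec = inspect.getargspec(example_call)
    defaults = map(type, argspec[3] or [])
    pairs = dict(itertools.izip_longest(
        reversed(argspec[0]), reversed(defaults), fillvalue=types.NotImplementedType))
    # pairs == {'cache': bool, 'repoid': NotImplementedType,
    #           'apiuser': NotImplementedType, 'request': NotImplementedType}
    # anything still mapped to NotImplementedType must be present in request.rpc_params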
|
315 | 323 | |
|
316 | 324 | def setup_request(request): |
|
317 | 325 | """ |
|
318 | 326 | Parse a JSON-RPC request body. It's used inside the predicates method |
|
319 | 327 | to validate and bootstrap requests for usage in rpc calls. |
|
320 | 328 | |
|
321 | 329 | We need to raise JSONRPCError here if we want to return some errors back to |
|
322 | 330 | user. |
|
323 | 331 | """ |
|
324 | 332 | |
|
325 | 333 | log.debug('Executing setup request: %r', request) |
|
326 | 334 | request.rpc_ip_addr = get_ip_addr(request.environ) |
|
327 | 335 | # TODO(marcink): deprecate GET at some point |
|
328 | 336 | if request.method not in ['POST', 'GET']: |
|
329 | 337 | log.debug('unsupported request method "%s"', request.method) |
|
330 | 338 | raise JSONRPCError( |
|
331 | 339 | 'unsupported request method "%s". Please use POST' % request.method) |
|
332 | 340 | |
|
333 | 341 | if 'CONTENT_LENGTH' not in request.environ: |
|
334 | 342 | log.debug("No Content-Length") |
|
335 | 343 | raise JSONRPCError("Empty body, No Content-Length in request") |
|
336 | 344 | |
|
337 | 345 | else: |
|
338 | 346 | length = request.environ['CONTENT_LENGTH'] |
|
339 | 347 | log.debug('Content-Length: %s', length) |
|
340 | 348 | |
|
341 | 349 | if length == 0: |
|
342 | 350 | log.debug("Content-Length is 0") |
|
343 | 351 | raise JSONRPCError("Content-Length is 0") |
|
344 | 352 | |
|
345 | 353 | raw_body = request.body |
|
346 | 354 | log.debug("Loading JSON body now") |
|
347 | 355 | try: |
|
348 | 356 | json_body = json.loads(raw_body) |
|
349 | 357 | except ValueError as e: |
|
350 | 358 | # catch JSON errors Here |
|
351 | 359 | raise JSONRPCError("JSON parse error ERR:%s RAW:%r" % (e, raw_body)) |
|
352 | 360 | |
|
353 | 361 | request.rpc_id = json_body.get('id') |
|
354 | 362 | request.rpc_method = json_body.get('method') |
|
355 | 363 | |
|
356 | 364 | # check required base parameters |
|
357 | 365 | try: |
|
358 | 366 | api_key = json_body.get('api_key') |
|
359 | 367 | if not api_key: |
|
360 | 368 | api_key = json_body.get('auth_token') |
|
361 | 369 | |
|
362 | 370 | if not api_key: |
|
363 | 371 | raise KeyError('api_key or auth_token') |
|
364 | 372 | |
|
365 | 373 | # TODO(marcink): support passing in token in request header |
|
366 | 374 | |
|
367 | 375 | request.rpc_api_key = api_key |
|
368 | 376 | request.rpc_id = json_body['id'] |
|
369 | 377 | request.rpc_method = json_body['method'] |
|
370 | 378 | request.rpc_params = json_body['args'] \ |
|
371 | 379 | if isinstance(json_body['args'], dict) else {} |
|
372 | 380 | |
|
373 | 381 | log.debug('method: %s, params: %.10240r', request.rpc_method, request.rpc_params) |
|
374 | 382 | except KeyError as e: |
|
375 | 383 | raise JSONRPCError('Incorrect JSON data. Missing %s' % e) |
|
376 | 384 | |
|
377 | 385 | log.debug('setup complete, now handling method:%s rpcid:%s', |
|
378 | 386 | request.rpc_method, request.rpc_id, ) |
|
379 | 387 | |
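Annotation: for context, a request body that setup_request() accepts looks roughly like this; the token, method name, and args are placeholders.

    payload = json.dumps({
        'id': 1,
        'auth_token': '<your-auth-token>',   # 'api_key' is also accepted by setup_request()
        'method': 'get_repo',
        'args': {'repoid': 'my-repo'},
    })
    # POST this to the apiv2 route (DEFAULT_URL, '/_admin/apiv2', unless overridden in settings)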
|
380 | 388 | |
|
381 | 389 | class RoutePredicate(object): |
|
382 | 390 | def __init__(self, val, config): |
|
383 | 391 | self.val = val |
|
384 | 392 | |
|
385 | 393 | def text(self): |
|
386 | 394 | return 'jsonrpc route = %s' % self.val |
|
387 | 395 | |
|
388 | 396 | phash = text |
|
389 | 397 | |
|
390 | 398 | def __call__(self, info, request): |
|
391 | 399 | if self.val: |
|
392 | 400 | # potentially setup and bootstrap our call |
|
393 | 401 | setup_request(request) |
|
394 | 402 | |
|
395 | 403 | # Always return True so that even if it isn't a valid RPC it |
|
396 | 404 | # will fall through to the underlying handlers like notfound_view |
|
397 | 405 | return True |
|
398 | 406 | |
|
399 | 407 | |
|
400 | 408 | class NotFoundPredicate(object): |
|
401 | 409 | def __init__(self, val, config): |
|
402 | 410 | self.val = val |
|
403 | 411 | self.methods = config.registry.jsonrpc_methods |
|
404 | 412 | |
|
405 | 413 | def text(self): |
|
406 | 414 | return 'jsonrpc method not found = {}.'.format(self.val) |
|
407 | 415 | |
|
408 | 416 | phash = text |
|
409 | 417 | |
|
410 | 418 | def __call__(self, info, request): |
|
411 | 419 | return hasattr(request, 'rpc_method') |
|
412 | 420 | |
|
413 | 421 | |
|
414 | 422 | class MethodPredicate(object): |
|
415 | 423 | def __init__(self, val, config): |
|
416 | 424 | self.method = val |
|
417 | 425 | |
|
418 | 426 | def text(self): |
|
419 | 427 | return 'jsonrpc method = %s' % self.method |
|
420 | 428 | |
|
421 | 429 | phash = text |
|
422 | 430 | |
|
423 | 431 | def __call__(self, context, request): |
|
424 | 432 | # we need to explicitly return False here, so pyramid doesn't try to |
|
425 | 433 | # execute our view directly. We need our main handler to execute things |
|
426 | 434 | return getattr(request, 'rpc_method') == self.method |
|
427 | 435 | |
|
428 | 436 | |
|
429 | 437 | def add_jsonrpc_method(config, view, **kwargs): |
|
430 | 438 | # pop the method name |
|
431 | 439 | method = kwargs.pop('method', None) |
|
432 | 440 | |
|
433 | 441 | if method is None: |
|
434 | 442 | raise ConfigurationError( |
|
435 | 443 | 'Cannot register a JSON-RPC method without specifying the "method"') |
|
436 | 444 | |
|
437 | 445 | # we define custom predicate, to enable to detect conflicting methods, |
|
438 | 446 | # those predicates are kind of "translation" from the decorator variables |
|
439 | 447 | # to internal predicates names |
|
440 | 448 | |
|
441 | 449 | kwargs['jsonrpc_method'] = method |
|
442 | 450 | |
|
443 | 451 | # register our view into global view store for validation |
|
444 | 452 | config.registry.jsonrpc_methods[method] = view |
|
445 | 453 | |
|
446 | 454 | # we're using our main request_view handler, here, so each method |
|
447 | 455 | # has a unified handler for itself |
|
448 | 456 | config.add_view(request_view, route_name='apiv2', **kwargs) |
|
449 | 457 | |
|
450 | 458 | |
|
451 | 459 | class jsonrpc_method(object): |
|
452 | 460 | """ |
|
453 | 461 | decorator that works similarly to the @add_view_config decorator, |
|
454 | 462 | but tailored for our JSON RPC |
|
455 | 463 | """ |
|
456 | 464 | |
|
457 | 465 | venusian = venusian # for testing injection |
|
458 | 466 | |
|
459 | 467 | def __init__(self, method=None, **kwargs): |
|
460 | 468 | self.method = method |
|
461 | 469 | self.kwargs = kwargs |
|
462 | 470 | |
|
463 | 471 | def __call__(self, wrapped): |
|
464 | 472 | kwargs = self.kwargs.copy() |
|
465 | 473 | kwargs['method'] = self.method or wrapped.__name__ |
|
466 | 474 | depth = kwargs.pop('_depth', 0) |
|
467 | 475 | |
|
468 | 476 | def callback(context, name, ob): |
|
469 | 477 | config = context.config.with_package(info.module) |
|
470 | 478 | config.add_jsonrpc_method(view=ob, **kwargs) |
|
471 | 479 | |
|
472 | 480 | info = venusian.attach(wrapped, callback, category='pyramid', |
|
473 | 481 | depth=depth + 1) |
|
474 | 482 | if info.scope == 'class': |
|
475 | 483 | # ensure that attr is set if decorating a class method |
|
476 | 484 | kwargs.setdefault('attr', wrapped.__name__) |
|
477 | 485 | |
|
478 | 486 | kwargs['_info'] = info.codeinfo # fbo action_method |
|
479 | 487 | return wrapped |
|
480 | 488 | |
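Annotation: a minimal usage sketch of the decorator; the function below is illustrative and not part of this module.

    @jsonrpc_method()
    def my_ping(request, apiuser):
        # registered under the method name `my_ping`; reachable once config.scan() runs
        return 'pong'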
|
481 | 489 | |
|
482 | 490 | class jsonrpc_deprecated_method(object): |
|
483 | 491 | """ |
|
484 | 492 | Marks method as deprecated, adds log.warning, and injects a special key into |
|
485 | 493 | the request variable to mark method as deprecated. |
|
486 | 494 | Also injects special docstring that extract_docs will catch to mark |
|
487 | 495 | method as deprecated. |
|
488 | 496 | |
|
489 | 497 | :param use_method: specify which method should be used instead of |
|
490 | 498 | the decorated one |
|
491 | 499 | |
|
492 | 500 | Use like:: |
|
493 | 501 | |
|
494 | 502 | @jsonrpc_method() |
|
495 | 503 | @jsonrpc_deprecated_method(use_method='new_func', deprecated_at_version='3.0.0') |
|
496 | 504 | def old_func(request, apiuser, arg1, arg2): |
|
497 | 505 | ... |
|
498 | 506 | """ |
|
499 | 507 | |
|
500 | 508 | def __init__(self, use_method, deprecated_at_version): |
|
501 | 509 | self.use_method = use_method |
|
502 | 510 | self.deprecated_at_version = deprecated_at_version |
|
503 | 511 | self.deprecated_msg = '' |
|
504 | 512 | |
|
505 | 513 | def __call__(self, func): |
|
506 | 514 | self.deprecated_msg = 'Please use method `{method}` instead.'.format( |
|
507 | 515 | method=self.use_method) |
|
508 | 516 | |
|
509 | 517 | docstring = """\n |
|
510 | 518 | .. deprecated:: {version} |
|
511 | 519 | |
|
512 | 520 | {deprecation_message} |
|
513 | 521 | |
|
514 | 522 | {original_docstring} |
|
515 | 523 | """ |
|
516 | 524 | func.__doc__ = docstring.format( |
|
517 | 525 | version=self.deprecated_at_version, |
|
518 | 526 | deprecation_message=self.deprecated_msg, |
|
519 | 527 | original_docstring=func.__doc__) |
|
520 | 528 | return decorator.decorator(self.__wrapper, func) |
|
521 | 529 | |
|
522 | 530 | def __wrapper(self, func, *fargs, **fkwargs): |
|
523 | 531 | log.warning('DEPRECATED API CALL on function %s, please ' |
|
524 | 532 | 'use `%s` instead', func, self.use_method) |
|
525 | 533 | # alter function docstring to mark as deprecated, this is picked up |
|
526 | 534 | # via fabric file that generates API DOC. |
|
527 | 535 | result = func(*fargs, **fkwargs) |
|
528 | 536 | |
|
529 | 537 | request = fargs[0] |
|
530 | 538 | request.rpc_deprecation = 'DEPRECATED METHOD ' + self.deprecated_msg |
|
531 | 539 | return result |
|
532 | 540 | |
|
533 | 541 | |
|
534 | 542 | def add_api_methods(config): |
|
535 | 543 | from rhodecode.api.views import ( |
|
536 | 544 | deprecated_api, gist_api, pull_request_api, repo_api, repo_group_api, |
|
537 | 545 | server_api, search_api, testing_api, user_api, user_group_api) |
|
538 | 546 | |
|
539 | 547 | config.scan('rhodecode.api.views') |
|
540 | 548 | |
|
541 | 549 | |
|
542 | 550 | def includeme(config): |
|
543 | 551 | plugin_module = 'rhodecode.api' |
|
544 | 552 | plugin_settings = get_plugin_settings( |
|
545 | 553 | plugin_module, config.registry.settings) |
|
546 | 554 | |
|
547 | 555 | if not hasattr(config.registry, 'jsonrpc_methods'): |
|
548 | 556 | config.registry.jsonrpc_methods = OrderedDict() |
|
549 | 557 | |
|
550 | 558 | # match filter by given method only |
|
551 | 559 | config.add_view_predicate('jsonrpc_method', MethodPredicate) |
|
552 | 560 | config.add_view_predicate('jsonrpc_method_not_found', NotFoundPredicate) |
|
553 | 561 | |
|
554 | 562 | config.add_renderer(DEFAULT_RENDERER, ExtJsonRenderer( |
|
555 | 563 | serializer=json.dumps, indent=4)) |
|
556 | 564 | config.add_directive('add_jsonrpc_method', add_jsonrpc_method) |
|
557 | 565 | |
|
558 | 566 | config.add_route_predicate( |
|
559 | 567 | 'jsonrpc_call', RoutePredicate) |
|
560 | 568 | |
|
561 | 569 | config.add_route( |
|
562 | 570 | 'apiv2', plugin_settings.get('url', DEFAULT_URL), jsonrpc_call=True) |
|
563 | 571 | |
|
564 | 572 | # register some exception handling view |
|
565 | 573 | config.add_view(exception_view, context=JSONRPCBaseError) |
|
566 | 574 | config.add_notfound_view(exception_view, jsonrpc_method_not_found=True) |
|
567 | 575 | |
|
568 | 576 | add_api_methods(config) |
@@ -1,809 +1,816 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Authentication modules |
|
23 | 23 | """ |
|
24 | 24 | import socket |
|
25 | 25 | import string |
|
26 | 26 | import colander |
|
27 | 27 | import copy |
|
28 | 28 | import logging |
|
29 | 29 | import time |
|
30 | 30 | import traceback |
|
31 | 31 | import warnings |
|
32 | 32 | import functools |
|
33 | 33 | |
|
34 | 34 | from pyramid.threadlocal import get_current_registry |
|
35 | 35 | |
|
36 | 36 | from rhodecode.authentication.interface import IAuthnPluginRegistry |
|
37 | 37 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
38 | 38 | from rhodecode.lib import rc_cache |
|
39 | from rhodecode.lib.statsd_client import StatsdClient | |
|
39 | 40 | from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt |
|
40 | 41 | from rhodecode.lib.utils2 import safe_int, safe_str |
|
41 | from rhodecode.lib.exceptions import LdapConnectionError, LdapUsernameError, | |
|
42 | LdapPasswordError | |
|
42 | from rhodecode.lib.exceptions import (LdapConnectionError, LdapUsernameError, LdapPasswordError) | |
|
43 | 43 | from rhodecode.model.db import User |
|
44 | 44 | from rhodecode.model.meta import Session |
|
45 | 45 | from rhodecode.model.settings import SettingsModel |
|
46 | 46 | from rhodecode.model.user import UserModel |
|
47 | 47 | from rhodecode.model.user_group import UserGroupModel |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | # auth types that authenticate() function can receive |
|
53 | 53 | VCS_TYPE = 'vcs' |
|
54 | 54 | HTTP_TYPE = 'http' |
|
55 | 55 | |
|
56 | 56 | external_auth_session_key = 'rhodecode.external_auth' |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class hybrid_property(object): |
|
60 | 60 | """ |
|
61 | 61 | a property decorator that works both for instance and class |
|
62 | 62 | """ |
|
63 | 63 | def __init__(self, fget, fset=None, fdel=None, expr=None): |
|
64 | 64 | self.fget = fget |
|
65 | 65 | self.fset = fset |
|
66 | 66 | self.fdel = fdel |
|
67 | 67 | self.expr = expr or fget |
|
68 | 68 | functools.update_wrapper(self, fget) |
|
69 | 69 | |
|
70 | 70 | def __get__(self, instance, owner): |
|
71 | 71 | if instance is None: |
|
72 | 72 | return self.expr(owner) |
|
73 | 73 | else: |
|
74 | 74 | return self.fget(instance) |
|
75 | 75 | |
|
76 | 76 | def __set__(self, instance, value): |
|
77 | 77 | self.fset(instance, value) |
|
78 | 78 | |
|
79 | 79 | def __delete__(self, instance): |
|
80 | 80 | self.fdel(instance) |
|
81 | 81 | |
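Annotation: a short sketch of what hybrid_property buys the plugin classes below; DummyPlugin and its plugin id are hypothetical.

    class DummyPlugin(RhodeCodeAuthPluginBase):
        @hybrid_property
        def name(self):
            return 'dummy'

    DummyPlugin('egg:dummy#dummy').name   # instance access -> 'dummy'
    DummyPlugin.name                      # class access also evaluates the getter -> 'dummy'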
|
82 | 82 | |
|
83 | 83 | class LazyFormencode(object): |
|
84 | 84 | def __init__(self, formencode_obj, *args, **kwargs): |
|
85 | 85 | self.formencode_obj = formencode_obj |
|
86 | 86 | self.args = args |
|
87 | 87 | self.kwargs = kwargs |
|
88 | 88 | |
|
89 | 89 | def __call__(self, *args, **kwargs): |
|
90 | 90 | from inspect import isfunction |
|
91 | 91 | formencode_obj = self.formencode_obj |
|
92 | 92 | if isfunction(formencode_obj): |
|
93 | 93 | # case we wrap validators into functions |
|
94 | 94 | formencode_obj = self.formencode_obj(*args, **kwargs) |
|
95 | 95 | return formencode_obj(*self.args, **self.kwargs) |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | class RhodeCodeAuthPluginBase(object): |
|
99 | 99 | # UID is used to register plugin to the registry |
|
100 | 100 | uid = None |
|
101 | 101 | |
|
102 | 102 | # cache the authentication request for N seconds. Some kinds |
|
103 | 103 | # of authentication methods are very heavy and it's very efficient to cache |
|
104 | 104 | # the result of a call. If it's set to None (default), cache is off |
|
105 | 105 | AUTH_CACHE_TTL = None |
|
106 | 106 | AUTH_CACHE = {} |
|
107 | 107 | |
|
108 | 108 | auth_func_attrs = { |
|
109 | 109 | "username": "unique username", |
|
110 | 110 | "firstname": "first name", |
|
111 | 111 | "lastname": "last name", |
|
112 | 112 | "email": "email address", |
|
113 | 113 | "groups": '["list", "of", "groups"]', |
|
114 | 114 | "user_group_sync": |
|
115 | 115 | 'True|False defines if returned user groups should be synced', |
|
116 | 116 | "extern_name": "name in external source of record", |
|
117 | 117 | "extern_type": "type of external source of record", |
|
118 | 118 | "admin": 'True|False defines if user should be RhodeCode super admin', |
|
119 | 119 | "active": |
|
120 | 120 | 'True|False defines active state of user internally for RhodeCode', |
|
121 | 121 | "active_from_extern": |
|
122 | 122 | "True|False|None, active state from the external auth, " |
|
123 | 123 | "None means use definition from RhodeCode extern_type active value" |
|
124 | 124 | |
|
125 | 125 | } |
|
126 | 126 | # set on authenticate() method and via set_auth_type func. |
|
127 | 127 | auth_type = None |
|
128 | 128 | |
|
129 | 129 | # set on authenticate() method and via set_calling_scope_repo, this is a |
|
130 | 130 | # calling scope repository when doing authentication most likely on VCS |
|
131 | 131 | # operations |
|
132 | 132 | acl_repo_name = None |
|
133 | 133 | |
|
134 | 134 | # List of setting names to store encrypted. Plugins may override this list |
|
135 | 135 | # to store settings encrypted. |
|
136 | 136 | _settings_encrypted = [] |
|
137 | 137 | |
|
138 | 138 | # Mapping of python to DB settings model types. Plugins may override or |
|
139 | 139 | # extend this mapping. |
|
140 | 140 | _settings_type_map = { |
|
141 | 141 | colander.String: 'unicode', |
|
142 | 142 | colander.Integer: 'int', |
|
143 | 143 | colander.Boolean: 'bool', |
|
144 | 144 | colander.List: 'list', |
|
145 | 145 | } |
|
146 | 146 | |
|
147 | 147 | # list of keys in settings that are unsafe to be logged, should be passwords |
|
148 | 148 | # or other crucial credentials |
|
149 | 149 | _settings_unsafe_keys = [] |
|
150 | 150 | |
|
151 | 151 | def __init__(self, plugin_id): |
|
152 | 152 | self._plugin_id = plugin_id |
|
153 | 153 | self._settings = {} |
|
154 | 154 | |
|
155 | 155 | def __str__(self): |
|
156 | 156 | return self.get_id() |
|
157 | 157 | |
|
158 | 158 | def _get_setting_full_name(self, name): |
|
159 | 159 | """ |
|
160 | 160 | Return the full setting name used for storing values in the database. |
|
161 | 161 | """ |
|
162 | 162 | # TODO: johbo: Using the name here is problematic. It would be good to |
|
163 | 163 | # introduce either new models in the database to hold Plugin and |
|
164 | 164 | # PluginSetting or to use the plugin id here. |
|
165 | 165 | return 'auth_{}_{}'.format(self.name, name) |
|
166 | 166 | |
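Annotation: for example, for a plugin whose name property is 'ldap' (hypothetical), the stored key would be built like this:

    plugin._get_setting_full_name('enabled')   # -> 'auth_ldap_enabled'
    # get_setting_by_name() later prefixes this with 'rhodecode_' when reading settings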
|
167 | 167 | def _get_setting_type(self, name): |
|
168 | 168 | """ |
|
169 | 169 | Return the type of a setting. This type is defined by the SettingsModel |
|
170 | 170 | and determines how the setting is stored in DB. Optionally the suffix |
|
171 | 171 | `.encrypted` is appended to instruct SettingsModel to store it |
|
172 | 172 | encrypted. |
|
173 | 173 | """ |
|
174 | 174 | schema_node = self.get_settings_schema().get(name) |
|
175 | 175 | db_type = self._settings_type_map.get( |
|
176 | 176 | type(schema_node.typ), 'unicode') |
|
177 | 177 | if name in self._settings_encrypted: |
|
178 | 178 | db_type = '{}.encrypted'.format(db_type) |
|
179 | 179 | return db_type |
|
180 | 180 | |
|
181 | 181 | @classmethod |
|
182 | 182 | def docs(cls): |
|
183 | 183 | """ |
|
184 | 184 | Defines documentation url which helps with plugin setup |
|
185 | 185 | """ |
|
186 | 186 | return '' |
|
187 | 187 | |
|
188 | 188 | @classmethod |
|
189 | 189 | def icon(cls): |
|
190 | 190 | """ |
|
191 | 191 | Defines ICON in SVG format for authentication method |
|
192 | 192 | """ |
|
193 | 193 | return '' |
|
194 | 194 | |
|
195 | 195 | def is_enabled(self): |
|
196 | 196 | """ |
|
197 | 197 | Returns true if this plugin is enabled. An enabled plugin can be |
|
198 | 198 | configured in the admin interface but it is not consulted during |
|
199 | 199 | authentication. |
|
200 | 200 | """ |
|
201 | 201 | auth_plugins = SettingsModel().get_auth_plugins() |
|
202 | 202 | return self.get_id() in auth_plugins |
|
203 | 203 | |
|
204 | 204 | def is_active(self, plugin_cached_settings=None): |
|
205 | 205 | """ |
|
206 | 206 | Returns true if the plugin is activated. An activated plugin is |
|
207 | 207 | consulted during authentication, assumed it is also enabled. |
|
208 | 208 | """ |
|
209 | 209 | return self.get_setting_by_name( |
|
210 | 210 | 'enabled', plugin_cached_settings=plugin_cached_settings) |
|
211 | 211 | |
|
212 | 212 | def get_id(self): |
|
213 | 213 | """ |
|
214 | 214 | Returns the plugin id. |
|
215 | 215 | """ |
|
216 | 216 | return self._plugin_id |
|
217 | 217 | |
|
218 | 218 | def get_display_name(self, load_from_settings=False): |
|
219 | 219 | """ |
|
220 | 220 | Returns a translation string for displaying purposes. |
|
221 | 221 | if load_from_settings is set, plugin settings can override the display name |
|
222 | 222 | """ |
|
223 | 223 | raise NotImplementedError('Not implemented in base class') |
|
224 | 224 | |
|
225 | 225 | def get_settings_schema(self): |
|
226 | 226 | """ |
|
227 | 227 | Returns a colander schema, representing the plugin settings. |
|
228 | 228 | """ |
|
229 | 229 | return AuthnPluginSettingsSchemaBase() |
|
230 | 230 | |
|
231 | 231 | def _propagate_settings(self, raw_settings): |
|
232 | 232 | settings = {} |
|
233 | 233 | for node in self.get_settings_schema(): |
|
234 | 234 | settings[node.name] = self.get_setting_by_name( |
|
235 | 235 | node.name, plugin_cached_settings=raw_settings) |
|
236 | 236 | return settings |
|
237 | 237 | |
|
238 | 238 | def get_settings(self, use_cache=True): |
|
239 | 239 | """ |
|
240 | 240 | Returns the plugin settings as dictionary. |
|
241 | 241 | """ |
|
242 | 242 | if self._settings != {} and use_cache: |
|
243 | 243 | return self._settings |
|
244 | 244 | |
|
245 | 245 | raw_settings = SettingsModel().get_all_settings() |
|
246 | 246 | settings = self._propagate_settings(raw_settings) |
|
247 | 247 | |
|
248 | 248 | self._settings = settings |
|
249 | 249 | return self._settings |
|
250 | 250 | |
|
251 | 251 | def get_setting_by_name(self, name, default=None, plugin_cached_settings=None): |
|
252 | 252 | """ |
|
253 | 253 | Returns a plugin setting by name. |
|
254 | 254 | """ |
|
255 | 255 | full_name = 'rhodecode_{}'.format(self._get_setting_full_name(name)) |
|
256 | 256 | if plugin_cached_settings: |
|
257 | 257 | plugin_settings = plugin_cached_settings |
|
258 | 258 | else: |
|
259 | 259 | plugin_settings = SettingsModel().get_all_settings() |
|
260 | 260 | |
|
261 | 261 | if full_name in plugin_settings: |
|
262 | 262 | return plugin_settings[full_name] |
|
263 | 263 | else: |
|
264 | 264 | return default |
|
265 | 265 | |
|
266 | 266 | def create_or_update_setting(self, name, value): |
|
267 | 267 | """ |
|
268 | 268 | Create or update a setting for this plugin in the persistent storage. |
|
269 | 269 | """ |
|
270 | 270 | full_name = self._get_setting_full_name(name) |
|
271 | 271 | type_ = self._get_setting_type(name) |
|
272 | 272 | db_setting = SettingsModel().create_or_update_setting( |
|
273 | 273 | full_name, value, type_) |
|
274 | 274 | return db_setting.app_settings_value |
|
275 | 275 | |
|
276 | 276 | def log_safe_settings(self, settings): |
|
277 | 277 | """ |
|
278 | 278 | returns a log safe representation of settings, without any secrets |
|
279 | 279 | """ |
|
280 | 280 | settings_copy = copy.deepcopy(settings) |
|
281 | 281 | for k in self._settings_unsafe_keys: |
|
282 | 282 | if k in settings_copy: |
|
283 | 283 | del settings_copy[k] |
|
284 | 284 | return settings_copy |
|
285 | 285 | |
|
286 | 286 | @hybrid_property |
|
287 | 287 | def name(self): |
|
288 | 288 | """ |
|
289 | 289 | Returns the name of this authentication plugin. |
|
290 | 290 | |
|
291 | 291 | :returns: string |
|
292 | 292 | """ |
|
293 | 293 | raise NotImplementedError("Not implemented in base class") |
|
294 | 294 | |
|
295 | 295 | def get_url_slug(self): |
|
296 | 296 | """ |
|
297 | 297 | Returns a slug which should be used when constructing URLs which refer |
|
298 | 298 | to this plugin. By default it returns the plugin name. If the name is |
|
299 | 299 | not suitable for using it in an URL the plugin should override this |
|
300 | 300 | method. |
|
301 | 301 | """ |
|
302 | 302 | return self.name |
|
303 | 303 | |
|
304 | 304 | @property |
|
305 | 305 | def is_headers_auth(self): |
|
306 | 306 | """ |
|
307 | 307 | Returns True if this authentication plugin uses HTTP headers as |
|
308 | 308 | authentication method. |
|
309 | 309 | """ |
|
310 | 310 | return False |
|
311 | 311 | |
|
312 | 312 | @hybrid_property |
|
313 | 313 | def is_container_auth(self): |
|
314 | 314 | """ |
|
315 | 315 | Deprecated method that indicates if this authentication plugin uses |
|
316 | 316 | HTTP headers as authentication method. |
|
317 | 317 | """ |
|
318 | 318 | warnings.warn( |
|
319 | 319 | 'Use is_headers_auth instead.', category=DeprecationWarning) |
|
320 | 320 | return self.is_headers_auth |
|
321 | 321 | |
|
322 | 322 | @hybrid_property |
|
323 | 323 | def allows_creating_users(self): |
|
324 | 324 | """ |
|
325 | 325 | Defines if Plugin allows users to be created on-the-fly when |
|
326 | 326 | authentication is called. Controls whether external plugins are |
|
327 | 327 | allowed to create new users, or not. Base plugins should not be |
|
328 | 328 | allowed to, but external ones should be! |
|
329 | 329 | |
|
330 | 330 | :return: bool |
|
331 | 331 | """ |
|
332 | 332 | return False |
|
333 | 333 | |
|
334 | 334 | def set_auth_type(self, auth_type): |
|
335 | 335 | self.auth_type = auth_type |
|
336 | 336 | |
|
337 | 337 | def set_calling_scope_repo(self, acl_repo_name): |
|
338 | 338 | self.acl_repo_name = acl_repo_name |
|
339 | 339 | |
|
340 | 340 | def allows_authentication_from( |
|
341 | 341 | self, user, allows_non_existing_user=True, |
|
342 | 342 | allowed_auth_plugins=None, allowed_auth_sources=None): |
|
343 | 343 | """ |
|
344 | 344 | Checks if this authentication module should accept a request for |
|
345 | 345 | the current user. |
|
346 | 346 | |
|
347 | 347 | :param user: user object fetched using plugin's get_user() method. |
|
348 | 348 | :param allows_non_existing_user: if True, allow the user |
|
349 | 349 | to be empty, meaning not existing in our database |
|
350 | 350 | :param allowed_auth_plugins: if provided, users extern_type will be |
|
351 | 351 | checked against a list of provided extern types, which are plugin |
|
352 | 352 | auth_names in the end |
|
353 | 353 | :param allowed_auth_sources: authentication type allowed, |
|
354 | 354 | `http` or `vcs` default is both. |
|
355 | 355 | defines if plugin will accept only http authentication vcs |
|
356 | 356 | authentication(git/hg) or both |
|
357 | 357 | :returns: boolean |
|
358 | 358 | """ |
|
359 | 359 | if not user and not allows_non_existing_user: |
|
360 | 360 | log.debug('User is empty but plugin does not allow empty users,' |
|
361 | 361 | 'not allowed to authenticate') |
|
362 | 362 | return False |
|
363 | 363 | |
|
364 | 364 | expected_auth_plugins = allowed_auth_plugins or [self.name] |
|
365 | 365 | if user and (user.extern_type and |
|
366 | 366 | user.extern_type not in expected_auth_plugins): |
|
367 | 367 | log.debug( |
|
368 | 368 | 'User `%s` is bound to `%s` auth type. Plugin allows only ' |
|
369 | 369 | '%s, skipping', user, user.extern_type, expected_auth_plugins) |
|
370 | 370 | |
|
371 | 371 | return False |
|
372 | 372 | |
|
373 | 373 | # by default accept both |
|
374 | 374 | expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE] |
|
375 | 375 | if self.auth_type not in expected_auth_from: |
|
376 | 376 | log.debug('Current auth source is %s but plugin only allows %s', |
|
377 | 377 | self.auth_type, expected_auth_from) |
|
378 | 378 | return False |
|
379 | 379 | |
|
380 | 380 | return True |
|
381 | 381 | |
|
382 | 382 | def get_user(self, username=None, **kwargs): |
|
383 | 383 | """ |
|
384 | 384 | Helper method for user fetching in plugins, by default it's using |
|
385 | 385 | simple fetch by username, but this method can be customized in plugins |
|
386 | 386 | eg. headers auth plugin to fetch user by environ params |
|
387 | 387 | |
|
388 | 388 | :param username: username if given to fetch from database |
|
389 | 389 | :param kwargs: extra arguments needed for user fetching. |
|
390 | 390 | """ |
|
391 | 391 | user = None |
|
392 | 392 | log.debug( |
|
393 | 393 | 'Trying to fetch user `%s` from RhodeCode database', username) |
|
394 | 394 | if username: |
|
395 | 395 | user = User.get_by_username(username) |
|
396 | 396 | if not user: |
|
397 | 397 | log.debug('User not found, fallback to fetch user in ' |
|
398 | 398 | 'case insensitive mode') |
|
399 | 399 | user = User.get_by_username(username, case_insensitive=True) |
|
400 | 400 | else: |
|
401 | 401 | log.debug('provided username:`%s` is empty skipping...', username) |
|
402 | 402 | if not user: |
|
403 | 403 | log.debug('User `%s` not found in database', username) |
|
404 | 404 | else: |
|
405 | 405 | log.debug('Got DB user:%s', user) |
|
406 | 406 | return user |
|
407 | 407 | |
|
408 | 408 | def user_activation_state(self): |
|
409 | 409 | """ |
|
410 | 410 | Defines user activation state when creating new users |
|
411 | 411 | |
|
412 | 412 | :returns: boolean |
|
413 | 413 | """ |
|
414 | 414 | raise NotImplementedError("Not implemented in base class") |
|
415 | 415 | |
|
416 | 416 | def auth(self, userobj, username, passwd, settings, **kwargs): |
|
417 | 417 | """ |
|
418 | 418 | Given a user object (which may be null), username, a plaintext |
|
419 | 419 | password, and a settings object (containing all the keys needed as |
|
420 | 420 | listed in settings()), authenticate this user's login attempt. |
|
421 | 421 | |
|
422 | 422 | Return None on failure. On success, return a dictionary of the form: |
|
423 | 423 | |
|
424 | 424 | see: RhodeCodeAuthPluginBase.auth_func_attrs |
|
425 | 425 | This is later validated for correctness |
|
426 | 426 | """ |
|
427 | 427 | raise NotImplementedError("not implemented in base class") |
|
428 | 428 | |
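Annotation: a hedged sketch of the dict a concrete plugin's auth() is expected to return on success. The keys mirror auth_func_attrs above; the values here are illustrative only.

    def auth(self, userobj, username, passwd, settings, **kwargs):
        return {
            'username': username,
            'firstname': 'John', 'lastname': 'Doe', 'email': 'jdoe@example.com',
            'groups': [], 'user_group_sync': False,
            'extern_name': username, 'extern_type': 'ldap',
            'admin': False, 'active': True, 'active_from_extern': None,
        }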
|
429 | 429 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
430 | 430 | """ |
|
431 | 431 | Wrapper to call self.auth() that validates call on it |
|
432 | 432 | |
|
433 | 433 | :param userobj: userobj |
|
434 | 434 | :param username: username |
|
435 | 435 | :param passwd: plaintext password |
|
436 | 436 | :param settings: plugin settings |
|
437 | 437 | """ |
|
438 | 438 | auth = self.auth(userobj, username, passwd, settings, **kwargs) |
|
439 | 439 | if auth: |
|
440 | 440 | auth['_plugin'] = self.name |
|
441 | 441 | auth['_ttl_cache'] = self.get_ttl_cache(settings) |
|
442 | 442 | # check if hash should be migrated ? |
|
443 | 443 | new_hash = auth.get('_hash_migrate') |
|
444 | 444 | if new_hash: |
|
445 | 445 | self._migrate_hash_to_bcrypt(username, passwd, new_hash) |
|
446 | 446 | if 'user_group_sync' not in auth: |
|
447 | 447 | auth['user_group_sync'] = False |
|
448 | 448 | return self._validate_auth_return(auth) |
|
449 | 449 | return auth |
|
450 | 450 | |
|
451 | 451 | def _migrate_hash_to_bcrypt(self, username, password, new_hash): |
|
452 | 452 | new_hash_cypher = _RhodeCodeCryptoBCrypt() |
|
453 | 453 | # extra checks, so make sure new hash is correct. |
|
454 | 454 | password_encoded = safe_str(password) |
|
455 | 455 | if new_hash and new_hash_cypher.hash_check( |
|
456 | 456 | password_encoded, new_hash): |
|
457 | 457 | cur_user = User.get_by_username(username) |
|
458 | 458 | cur_user.password = new_hash |
|
459 | 459 | Session().add(cur_user) |
|
460 | 460 | Session().flush() |
|
461 | 461 | log.info('Migrated user %s hash to bcrypt', cur_user) |
|
462 | 462 | |
|
463 | 463 | def _validate_auth_return(self, ret): |
|
464 | 464 | if not isinstance(ret, dict): |
|
465 | 465 | raise Exception('returned value from auth must be a dict') |
|
466 | 466 | for k in self.auth_func_attrs: |
|
467 | 467 | if k not in ret: |
|
468 | 468 | raise Exception('Missing %s attribute from returned data' % k) |
|
469 | 469 | return ret |
|
470 | 470 | |
|
471 | 471 | def get_ttl_cache(self, settings=None): |
|
472 | 472 | plugin_settings = settings or self.get_settings() |
|
473 | 473 | # we set default to 30, we make a compromise here, |
|
474 | 474 | # performance > security, mostly due to LDAP/SVN, majority |
|
475 | 475 | # of users pick cache_ttl to be enabled |
|
476 | 476 | from rhodecode.authentication import plugin_default_auth_ttl |
|
477 | 477 | cache_ttl = plugin_default_auth_ttl |
|
478 | 478 | |
|
479 | 479 | if isinstance(self.AUTH_CACHE_TTL, (int, long)): |
|
480 | 480 | # plugin cache set inside is more important than the settings value |
|
481 | 481 | cache_ttl = self.AUTH_CACHE_TTL |
|
482 | 482 | elif plugin_settings.get('cache_ttl'): |
|
483 | 483 | cache_ttl = safe_int(plugin_settings.get('cache_ttl'), 0) |
|
484 | 484 | |
|
485 | 485 | plugin_cache_active = bool(cache_ttl and cache_ttl > 0) |
|
486 | 486 | return plugin_cache_active, cache_ttl |
|
487 | 487 | |
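Annotation: a brief note on the TTL precedence implemented in get_ttl_cache(), summarized from the code above.

    # a class-level AUTH_CACHE_TTL wins over the plugin's 'cache_ttl' setting,
    # which in turn wins over plugin_default_auth_ttl; caching is only active
    # when the resolved TTL is greater than 0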
|
488 | 488 | |
|
489 | 489 | class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase): |
|
490 | 490 | |
|
491 | 491 | @hybrid_property |
|
492 | 492 | def allows_creating_users(self): |
|
493 | 493 | return True |
|
494 | 494 | |
|
495 | 495 | def use_fake_password(self): |
|
496 | 496 | """ |
|
497 | 497 | Return a boolean that indicates whether or not we should set the user's |
|
498 | 498 | password to a random value when it is authenticated by this plugin. |
|
499 | 499 | If your plugin provides authentication, then you will generally |
|
500 | 500 | want this. |
|
501 | 501 | |
|
502 | 502 | :returns: boolean |
|
503 | 503 | """ |
|
504 | 504 | raise NotImplementedError("Not implemented in base class") |
|
505 | 505 | |
|
506 | 506 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
507 | 507 | # at this point _authenticate calls plugin's `auth()` function |
|
508 | 508 | auth = super(RhodeCodeExternalAuthPlugin, self)._authenticate( |
|
509 | 509 | userobj, username, passwd, settings, **kwargs) |
|
510 | 510 | |
|
511 | 511 | if auth: |
|
512 | 512 | # maybe plugin will clean the username ? |
|
513 | 513 | # we should use the return value |
|
514 | 514 | username = auth['username'] |
|
515 | 515 | |
|
516 | 516 | # if external source tells us that user is not active, we should |
|
517 | 517 | # skip rest of the process. This can prevent from creating users in |
|
518 | 518 | # RhodeCode when using external authentication, but if it's |
|
519 | 519 | # inactive user we shouldn't create that user anyway |
|
520 | 520 | if auth['active_from_extern'] is False: |
|
521 | 521 | log.warning( |
|
522 | 522 | "User %s authenticated against %s, but is inactive", |
|
523 | 523 | username, self.__module__) |
|
524 | 524 | return None |
|
525 | 525 | |
|
526 | 526 | cur_user = User.get_by_username(username, case_insensitive=True) |
|
527 | 527 | is_user_existing = cur_user is not None |
|
528 | 528 | |
|
529 | 529 | if is_user_existing: |
|
530 | 530 | log.debug('Syncing user `%s` from ' |
|
531 | 531 | '`%s` plugin', username, self.name) |
|
532 | 532 | else: |
|
533 | 533 | log.debug('Creating non existing user `%s` from ' |
|
534 | 534 | '`%s` plugin', username, self.name) |
|
535 | 535 | |
|
536 | 536 | if self.allows_creating_users: |
|
537 | 537 | log.debug('Plugin `%s` allows to ' |
|
538 | 538 | 'create new users', self.name) |
|
539 | 539 | else: |
|
540 | 540 | log.debug('Plugin `%s` does not allow to ' |
|
541 | 541 | 'create new users', self.name) |
|
542 | 542 | |
|
543 | 543 | user_parameters = { |
|
544 | 544 | 'username': username, |
|
545 | 545 | 'email': auth["email"], |
|
546 | 546 | 'firstname': auth["firstname"], |
|
547 | 547 | 'lastname': auth["lastname"], |
|
548 | 548 | 'active': auth["active"], |
|
549 | 549 | 'admin': auth["admin"], |
|
550 | 550 | 'extern_name': auth["extern_name"], |
|
551 | 551 | 'extern_type': self.name, |
|
552 | 552 | 'plugin': self, |
|
553 | 553 | 'allow_to_create_user': self.allows_creating_users, |
|
554 | 554 | } |
|
555 | 555 | |
|
556 | 556 | if not is_user_existing: |
|
557 | 557 | if self.use_fake_password(): |
|
558 | 558 | # Randomize the PW because we don't need it, but don't want |
|
559 | 559 | # them blank either |
|
560 | 560 | passwd = PasswordGenerator().gen_password(length=16) |
|
561 | 561 | user_parameters['password'] = passwd |
|
562 | 562 | else: |
|
563 | 563 | # Since the password is required by create_or_update method of |
|
564 | 564 | # UserModel, we need to set it explicitly. |
|
565 | 565 | # The create_or_update method is smart and recognises the |
|
566 | 566 | # password hashes as well. |
|
567 | 567 | user_parameters['password'] = cur_user.password |
|
568 | 568 | |
|
569 | 569 | # we either create or update users, we also pass the flag |
|
570 | 570 | # that controls if this method can actually do that. |
|
571 | 571 | # raises NotAllowedToCreateUserError if it cannot, and we try to. |
|
572 | 572 | user = UserModel().create_or_update(**user_parameters) |
|
573 | 573 | Session().flush() |
|
574 | 574 | # enforce user is just in given groups, all of them has to be ones |
|
575 | 575 | # created from plugins. We store this info in _group_data JSON |
|
576 | 576 | # field |
|
577 | 577 | |
|
578 | 578 | if auth['user_group_sync']: |
|
579 | 579 | try: |
|
580 | 580 | groups = auth['groups'] or [] |
|
581 | 581 | log.debug( |
|
582 | 582 | 'Performing user_group sync based on set `%s` ' |
|
583 | 583 | 'returned by `%s` plugin', groups, self.name) |
|
584 | 584 | UserGroupModel().enforce_groups(user, groups, self.name) |
|
585 | 585 | except Exception: |
|
586 | 586 | # for any reason group syncing fails, we should |
|
587 | 587 | # proceed with login |
|
588 | 588 | log.error(traceback.format_exc()) |
|
589 | 589 | |
|
590 | 590 | Session().commit() |
|
591 | 591 | return auth |
|
592 | 592 | |
|
593 | 593 | |
|
594 | 594 | class AuthLdapBase(object): |
|
595 | 595 | |
|
596 | 596 | @classmethod |
|
597 | 597 | def _build_servers(cls, ldap_server_type, ldap_server, port, use_resolver=True): |
|
598 | 598 | |
|
599 | 599 | def host_resolver(host, port, full_resolve=True): |
|
600 | 600 | """ |
|
601 | 601 | Main work for this function is to prevent ldap connection issues, |
|
602 | 602 | and detect them early using a "greenified" sockets |
|
603 | 603 | """ |
|
604 | 604 | host = host.strip() |
|
605 | 605 | if not full_resolve: |
|
606 | 606 | return '{}:{}'.format(host, port) |
|
607 | 607 | |
|
608 | 608 | log.debug('LDAP: Resolving IP for LDAP host `%s`', host) |
|
609 | 609 | try: |
|
610 | 610 | ip = socket.gethostbyname(host) |
|
611 | 611 | log.debug('LDAP: Got LDAP host `%s` ip %s', host, ip) |
|
612 | 612 | except Exception: |
|
613 | 613 | raise LdapConnectionError('Failed to resolve host: `{}`'.format(host)) |
|
614 | 614 | |
|
615 | 615 | log.debug('LDAP: Checking if IP %s is accessible', ip) |
|
616 | 616 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
|
617 | 617 | try: |
|
618 | 618 | s.connect((ip, int(port))) |
|
619 | 619 | s.shutdown(socket.SHUT_RD) |
|
620 | 620 | log.debug('LDAP: connection to %s successful', ip) |
|
621 | 621 | except Exception: |
|
622 | 622 | raise LdapConnectionError( |
|
623 | 623 | 'Failed to connect to host: `{}:{}`'.format(host, port)) |
|
624 | 624 | |
|
625 | 625 | return '{}:{}'.format(host, port) |
|
626 | 626 | |
|
627 | 627 | if len(ldap_server) == 1: |
|
628 | 628 | # in case of single server use resolver to detect potential |
|
629 | 629 | # connection issues |
|
630 | 630 | full_resolve = True |
|
631 | 631 | else: |
|
632 | 632 | full_resolve = False |
|
633 | 633 | |
|
634 | 634 | return ', '.join( |
|
635 | 635 | ["{}://{}".format( |
|
636 | 636 | ldap_server_type, |
|
637 | 637 | host_resolver(host, port, full_resolve=use_resolver and full_resolve)) |
|
638 | 638 | for host in ldap_server]) |
|
639 | 639 | |
|
640 | 640 | @classmethod |
|
641 | 641 | def _get_server_list(cls, servers): |
|
642 | 642 | return map(string.strip, servers.split(',')) |
|
643 | 643 | |
|
644 | 644 | @classmethod |
|
645 | 645 | def get_uid(cls, username, server_addresses): |
|
646 | 646 | uid = username |
|
647 | 647 | for server_addr in server_addresses: |
|
648 | 648 | uid = chop_at(username, "@%s" % server_addr) |
|
649 | 649 | return uid |
|
650 | 650 | |
|
651 | 651 | @classmethod |
|
652 | 652 | def validate_username(cls, username): |
|
653 | 653 | if "," in username: |
|
654 | 654 | raise LdapUsernameError( |
|
655 | 655 | "invalid character `,` in username: `{}`".format(username)) |
|
656 | 656 | |
|
657 | 657 | @classmethod |
|
658 | 658 | def validate_password(cls, username, password): |
|
659 | 659 | if not password: |
|
660 | 660 | msg = "Authenticating user %s with blank password not allowed" |
|
661 | 661 | log.warning(msg, username) |
|
662 | 662 | raise LdapPasswordError(msg) |
|
663 | 663 | |
|
664 | 664 | |
|
665 | 665 | def loadplugin(plugin_id): |
|
666 | 666 | """ |
|
667 | 667 | Loads and returns an instantiated authentication plugin. |
|
668 | 668 | Returns the RhodeCodeAuthPluginBase subclass on success, |
|
669 | 669 | or None on failure. |
|
670 | 670 | """ |
|
671 | 671 | # TODO: Disusing pyramids thread locals to retrieve the registry. |
|
672 | 672 | authn_registry = get_authn_registry() |
|
673 | 673 | plugin = authn_registry.get_plugin(plugin_id) |
|
674 | 674 | if plugin is None: |
|
675 | 675 | log.error('Authentication plugin not found: "%s"', plugin_id) |
|
676 | 676 | return plugin |
|
677 | 677 | |
|
678 | 678 | |
|
679 | 679 | def get_authn_registry(registry=None): |
|
680 | 680 | registry = registry or get_current_registry() |
|
681 | 681 | authn_registry = registry.queryUtility(IAuthnPluginRegistry) |
|
682 | 682 | return authn_registry |
|
683 | 683 | |
|
684 | 684 | |
|
685 | 685 | def authenticate(username, password, environ=None, auth_type=None, |
|
686 | 686 | skip_missing=False, registry=None, acl_repo_name=None): |
|
687 | 687 | """ |
|
688 | 688 | Authentication function used for access control, |
|
689 | 689 | It tries to authenticate based on the enabled authentication modules. |
|
690 | 690 | |
|
691 | 691 | :param username: username can be empty for headers auth |
|
692 | 692 | :param password: password can be empty for headers auth |
|
693 | 693 | :param environ: environ headers passed for headers auth |
|
694 | 694 | :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE` |
|
695 | 695 | :param skip_missing: ignores plugins that are in db but not in environment |
|
696 | 696 | :returns: None if auth failed, plugin_user dict if auth is correct |
|
697 | 697 | """ |
|
698 | 698 | if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]: |
|
699 | 699 | raise ValueError('auth type must be one of http, vcs; got "%s" instead' |
|
700 | 700 | % auth_type) |
|
701 | 701 | headers_only = environ and not (username and password) |
|
702 | 702 | |
|
703 | 703 | authn_registry = get_authn_registry(registry) |
|
704 | 704 | |
|
705 | 705 | plugins_to_check = authn_registry.get_plugins_for_authentication() |
|
706 | 706 | log.debug('Starting ordered authentication chain using %s plugins', |
|
707 | 707 | [x.name for x in plugins_to_check]) |
|
708 | 708 | for plugin in plugins_to_check: |
|
709 | 709 | plugin.set_auth_type(auth_type) |
|
710 | 710 | plugin.set_calling_scope_repo(acl_repo_name) |
|
711 | 711 | |
|
712 | 712 | if headers_only and not plugin.is_headers_auth: |
|
713 | 713 | log.debug('Auth type is for headers only and plugin `%s` is not ' |
|
714 | 714 | 'a headers plugin, skipping...', plugin.get_id()) |
|
715 | 715 | continue |
|
716 | 716 | |
|
717 | 717 | log.debug('Trying authentication using ** %s **', plugin.get_id()) |
|
718 | 718 | |
|
719 | 719 | # load plugin settings from RhodeCode database |
|
720 | 720 | plugin_settings = plugin.get_settings() |
|
721 | 721 | plugin_sanitized_settings = plugin.log_safe_settings(plugin_settings) |
|
722 | 722 | log.debug('Plugin `%s` settings:%s', plugin.get_id(), plugin_sanitized_settings) |
|
723 | 723 | |
|
724 | 724 | # use plugin's method of user extraction. |
|
725 | 725 | user = plugin.get_user(username, environ=environ, |
|
726 | 726 | settings=plugin_settings) |
|
727 | 727 | display_user = user.username if user else username |
|
728 | 728 | log.debug( |
|
729 | 729 | 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user) |
|
730 | 730 | |
|
731 | 731 | if not plugin.allows_authentication_from(user): |
|
732 | 732 | log.debug('Plugin %s does not accept user `%s` for authentication', |
|
733 | 733 | plugin.get_id(), display_user) |
|
734 | 734 | continue |
|
735 | 735 | else: |
|
736 | 736 | log.debug('Plugin %s accepted user `%s` for authentication', |
|
737 | 737 | plugin.get_id(), display_user) |
|
738 | 738 | |
|
739 | 739 | log.info('Authenticating user `%s` using %s plugin', |
|
740 | 740 | display_user, plugin.get_id()) |
|
741 | 741 | |
|
742 | 742 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) |
|
743 | 743 | |
|
744 | 744 | log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)', |
|
745 | 745 | plugin.get_id(), plugin_cache_active, cache_ttl) |
|
746 | 746 | |
|
747 | 747 | user_id = user.user_id if user else 'no-user' |
|
748 | 748 | # don't cache for empty users |
|
749 | 749 | plugin_cache_active = plugin_cache_active and user_id |
|
750 | 750 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) |
|
751 | 751 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) |
|
752 | 752 | |
|
753 | 753 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, |
|
754 | 754 | expiration_time=cache_ttl, |
|
755 | 755 | condition=plugin_cache_active) |
|
756 | 756 | def compute_auth( |
|
757 | 757 | cache_name, plugin_name, username, password): |
|
758 | 758 | |
|
759 | 759 | # _authenticate is a wrapper around the plugin's .auth() method. |
|
760 | 760 | # It checks that .auth() returns proper data. |
|
761 | 761 | # For RhodeCodeExternalAuthPlugin it also maps the user and the |
|
762 | 762 | # attributes returned from .auth() into the RhodeCode database. |
|
763 | 763 | # If this function returns data, |
|
764 | 764 | # then authentication succeeded. |
|
765 | 765 | log.debug('Running plugin `%s` _authenticate method ' |
|
766 | 766 | 'using username and password', plugin.get_id()) |
|
767 | 767 | return plugin._authenticate( |
|
768 | 768 | user, username, password, plugin_settings, |
|
769 | 769 | environ=environ or {}) |
|
770 | 770 | |
|
771 | 771 | start = time.time() |
|
772 | 772 | # for environ based auth, password can be empty, but then the validation is |
|
773 | 773 | # on the server that fills in the env data needed for authentication |
|
774 | 774 | plugin_user = compute_auth('auth', plugin.name, username, (password or '')) |
|
775 | 775 | |
|
776 | 776 | auth_time = time.time() - start |
|
777 | 777 | log.debug('Authentication for plugin `%s` completed in %.4fs, ' |
|
778 | 778 | 'expiration time of fetched cache %.1fs.', |
|
779 | 779 | plugin.get_id(), auth_time, cache_ttl) |
|
780 | 780 | |
|
781 | 781 | log.debug('PLUGIN USER DATA: %s', plugin_user) |
|
782 | 782 | |
|
783 | statsd = StatsdClient.statsd | |
|
784 | ||
|
783 | 785 | if plugin_user: |
|
784 | 786 | log.debug('Plugin returned proper authentication data') |
|
787 | if statsd: | |
|
788 | statsd.incr('rhodecode_login_success_total') | |
|
785 | 789 | return plugin_user |
|
790 | ||
|
786 | 791 | # we failed to authenticate because the .auth() method didn't return a proper user |
|
787 | 792 | log.debug("User `%s` failed to authenticate against %s", |
|
788 | 793 | display_user, plugin.get_id()) |
|
794 | if statsd: | |
|
795 | statsd.incr('rhodecode_login_fail_total') | |
|
789 | 796 | |
|
790 | 797 | # case when we failed to authenticate against all defined plugins |
|
791 | 798 | return None |
|
792 | 799 | |
|
793 | 800 | |
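A usage sketch for the chain above, assuming this module's `authenticate` and the `HTTP_TYPE` constant it checks are importable by the caller (credentials are placeholders):

    plugin_user = authenticate(
        'john', 'secret', environ={}, auth_type=HTTP_TYPE, acl_repo_name=None)
    if plugin_user is None:
        # every enabled plugin either skipped or rejected the credentials
        pass
    else:
        # plugin_user is the dict assembled by the successful plugin
        pass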
|
794 | 801 | def chop_at(s, sub, inclusive=False): |
|
795 | 802 | """Truncate string ``s`` at the first occurrence of ``sub``. |
|
796 | 803 | |
|
797 | 804 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
798 | 805 | |
|
799 | 806 | >>> chop_at("plutocratic brats", "rat") |
|
800 | 807 | 'plutoc' |
|
801 | 808 | >>> chop_at("plutocratic brats", "rat", True) |
|
802 | 809 | 'plutocrat' |
|
803 | 810 | """ |
|
804 | 811 | pos = s.find(sub) |
|
805 | 812 | if pos == -1: |
|
806 | 813 | return s |
|
807 | 814 | if inclusive: |
|
808 | 815 | return s[:pos+len(sub)] |
|
809 | 816 | return s[:pos] |
@@ -1,796 +1,796 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import sys |
|
23 | 23 | import logging |
|
24 | 24 | import collections |
|
25 | 25 | import tempfile |
|
26 | 26 | import time |
|
27 | 27 | |
|
28 | 28 | from paste.gzipper import make_gzip_middleware |
|
29 | 29 | import pyramid.events |
|
30 | 30 | from pyramid.wsgi import wsgiapp |
|
31 | 31 | from pyramid.authorization import ACLAuthorizationPolicy |
|
32 | 32 | from pyramid.config import Configurator |
|
33 | 33 | from pyramid.settings import asbool, aslist |
|
34 | 34 | from pyramid.httpexceptions import ( |
|
35 | 35 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound) |
|
36 | 36 | from pyramid.renderers import render_to_response |
|
37 | 37 | |
|
38 | 38 | from rhodecode.model import meta |
|
39 | 39 | from rhodecode.config import patches |
|
40 | 40 | from rhodecode.config import utils as config_utils |
|
41 | 41 | from rhodecode.config.environment import load_pyramid_environment |
|
42 | 42 | |
|
43 | 43 | import rhodecode.events |
|
44 | 44 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
45 | 45 | from rhodecode.lib.request import Request |
|
46 | 46 | from rhodecode.lib.vcs import VCSCommunicationError |
|
47 | 47 | from rhodecode.lib.exceptions import VCSServerUnavailable |
|
48 | 48 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
49 | 49 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
50 | 50 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
51 | 51 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict |
|
52 | 52 | from rhodecode.lib.exc_tracking import store_exception |
|
53 | 53 | from rhodecode.subscribers import ( |
|
54 | 54 | scan_repositories_if_enabled, write_js_routes_if_enabled, |
|
55 | 55 | write_metadata_if_needed, write_usage_data) |
|
56 | 56 | from rhodecode.lib.statsd_client import StatsdClient |
|
57 | 57 | |
|
58 | 58 | log = logging.getLogger(__name__) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | def is_http_error(response): |
|
62 | 62 | # error which should have traceback |
|
63 | 63 | return response.status_code > 499 |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | def should_load_all(): |
|
67 | 67 | """ |
|
68 | 68 | Returns whether all application components should be loaded. In some cases it's |
|
69 | 69 | desirable to skip app loading for faster shell script execution |
|
70 | 70 | """ |
|
71 | 71 | ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER') |
|
72 | 72 | if ssh_cmd: |
|
73 | 73 | return False |
|
74 | 74 | |
|
75 | 75 | return True |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | def make_pyramid_app(global_config, **settings): |
|
79 | 79 | """ |
|
80 | 80 | Constructs the WSGI application based on Pyramid. |
|
81 | 81 | |
|
82 | 82 | Specials: |
|
83 | 83 | |
|
84 | 84 | * The application can also be integrated like a plugin via the call to |
|
85 | 85 | `includeme`. This is accompanied by the other utility functions which |
|
86 | 86 | are called. Changing this should be done with great care, so as not to break |
|
87 | 87 | cases when these fragments are assembled from another place. |
|
88 | 88 | |
|
89 | 89 | """ |
|
90 | 90 | |
|
91 | 91 | # Allows using format-style "{ENV_NAME}" placeholders in the configuration; each |
|
92 | 92 | # is replaced by the value of the environment variable "NAME" in this case. |
|
93 | 93 | start_time = time.time() |
|
94 | 94 | log.info('Pyramid app config starting') |
|
95 | 95 | |
|
96 | 96 | # init and bootstrap StatsdClient |
|
97 | 97 | StatsdClient.setup(settings) |
|
98 | 98 | |
|
99 | 99 | debug = asbool(global_config.get('debug')) |
|
100 | 100 | if debug: |
|
101 | 101 | enable_debug() |
|
102 | 102 | |
|
103 | 103 | environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()} |
|
104 | 104 | |
|
105 | 105 | global_config = _substitute_values(global_config, environ) |
|
106 | 106 | settings = _substitute_values(settings, environ) |
|
107 | 107 | |
|
108 | 108 | sanitize_settings_and_apply_defaults(global_config, settings) |
|
109 | 109 | |
|
110 | 110 | config = Configurator(settings=settings) |
|
111 | 111 | # Init our statsd at very start |
|
112 | 112 | config.registry.statsd = StatsdClient.statsd |
|
113 | 113 | |
|
114 | 114 | # Apply compatibility patches |
|
115 | 115 | patches.inspect_getargspec() |
|
116 | 116 | |
|
117 | 117 | load_pyramid_environment(global_config, settings) |
|
118 | 118 | |
|
119 | 119 | # Static file view comes first |
|
120 | 120 | includeme_first(config) |
|
121 | 121 | |
|
122 | 122 | includeme(config) |
|
123 | 123 | |
|
124 | 124 | pyramid_app = config.make_wsgi_app() |
|
125 | 125 | pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config) |
|
126 | 126 | pyramid_app.config = config |
|
127 | 127 | |
|
128 | 128 | config.configure_celery(global_config['__file__']) |
|
129 | 129 | |
|
130 | 130 | # creating the app uses a connection - return it after we are done |
|
131 | 131 | meta.Session.remove() |
|
132 | 132 | statsd = StatsdClient.statsd |
|
133 | 133 | |
|
134 | 134 | total_time = time.time() - start_time |
|
135 | 135 | log.info('Pyramid app `%s` created and configured in %.2fs', |
|
136 | 136 | pyramid_app.func_name, total_time) |
|
137 | 137 | if statsd: |
|
138 | 138 | elapsed_time_ms = 1000.0 * total_time |
|
139 | 139 | statsd.timing('rhodecode_app_bootstrap_timing', elapsed_time_ms, tags=[ |
|
140 | 140 | "pyramid_app:{}".format(pyramid_app.func_name) |
|
141 | 141 | ]) |
|
142 | 142 | return pyramid_app |
|
143 | 143 | |
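The statsd counters and timers touched in this changeset all follow the same guard pattern, since `StatsdClient.statsd` is `None` when no statsd server is configured; a minimal sketch of that pattern (the metric name and tag are illustrative):

    import time
    from rhodecode.lib.statsd_client import StatsdClient

    statsd = StatsdClient.statsd  # None unless statsd is configured
    start = time.time()
    # ... work being measured ...
    if statsd:
        elapsed_ms = 1000.0 * (time.time() - start)
        statsd.timing('rhodecode_example_timing', elapsed_ms,
                      tags=['pyramid_app:example'])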
|
144 | 144 | |
|
145 | 145 | def not_found_view(request): |
|
146 | 146 | """ |
|
147 | 147 | This creates the view which should be registered as not-found-view to |
|
148 | 148 | pyramid. |
|
149 | 149 | """ |
|
150 | 150 | |
|
151 | 151 | if not getattr(request, 'vcs_call', None): |
|
152 | 152 | # handle like regular case with our error_handler |
|
153 | 153 | return error_handler(HTTPNotFound(), request) |
|
154 | 154 | |
|
155 | 155 | # handle not found view as a vcs call |
|
156 | 156 | settings = request.registry.settings |
|
157 | 157 | ae_client = getattr(request, 'ae_client', None) |
|
158 | 158 | vcs_app = VCSMiddleware( |
|
159 | 159 | HTTPNotFound(), request.registry, settings, |
|
160 | 160 | appenlight_client=ae_client) |
|
161 | 161 | |
|
162 | 162 | return wsgiapp(vcs_app)(None, request) |
|
163 | 163 | |
|
164 | 164 | |
|
165 | 165 | def error_handler(exception, request): |
|
166 | 166 | import rhodecode |
|
167 | 167 | from rhodecode.lib import helpers |
|
168 | 168 | from rhodecode.lib.utils2 import str2bool |
|
169 | 169 | |
|
170 | 170 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' |
|
171 | 171 | |
|
172 | 172 | base_response = HTTPInternalServerError() |
|
173 | 173 | # prefer original exception for the response since it may have headers set |
|
174 | 174 | if isinstance(exception, HTTPException): |
|
175 | 175 | base_response = exception |
|
176 | 176 | elif isinstance(exception, VCSCommunicationError): |
|
177 | 177 | base_response = VCSServerUnavailable() |
|
178 | 178 | |
|
179 | 179 | if is_http_error(base_response): |
|
180 | 180 | log.exception( |
|
181 | 181 | 'error occurred handling this request for path: %s', request.path) |
|
182 | 182 | |
|
183 | 183 | statsd = request.registry.statsd |
|
184 | 184 | if statsd and base_response.status_code > 499: |
|
185 | statsd.incr('rhodecode_exception', tags=["code:{}".format(base_response.status_code)]) | |
|
185 | statsd.incr('rhodecode_exception_total', tags=["code:{}".format(base_response.status_code)]) | |
|
186 | 186 | |
|
187 | 187 | error_explanation = base_response.explanation or str(base_response) |
|
188 | 188 | if base_response.status_code == 404: |
|
189 | 189 | error_explanation += " Or you don't have permission to access this page." |
|
190 | 190 | c = AttributeDict() |
|
191 | 191 | c.error_message = base_response.status |
|
192 | 192 | c.error_explanation = error_explanation |
|
193 | 193 | c.visual = AttributeDict() |
|
194 | 194 | |
|
195 | 195 | c.visual.rhodecode_support_url = ( |
|
196 | 196 | request.registry.settings.get('rhodecode_support_url') or |
|
197 | 197 | request.route_url('rhodecode_support') |
|
198 | 198 | ) |
|
199 | 199 | c.redirect_time = 0 |
|
200 | 200 | c.rhodecode_name = rhodecode_title |
|
201 | 201 | if not c.rhodecode_name: |
|
202 | 202 | c.rhodecode_name = 'Rhodecode' |
|
203 | 203 | |
|
204 | 204 | c.causes = [] |
|
205 | 205 | if is_http_error(base_response): |
|
206 | 206 | c.causes.append('Server is overloaded.') |
|
207 | 207 | c.causes.append('Server database connection is lost.') |
|
208 | 208 | c.causes.append('Server encountered an unhandled error.') |
|
209 | 209 | |
|
210 | 210 | if hasattr(base_response, 'causes'): |
|
211 | 211 | c.causes = base_response.causes |
|
212 | 212 | |
|
213 | 213 | c.messages = helpers.flash.pop_messages(request=request) |
|
214 | 214 | |
|
215 | 215 | exc_info = sys.exc_info() |
|
216 | 216 | c.exception_id = id(exc_info) |
|
217 | 217 | c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \ |
|
218 | 218 | or base_response.status_code > 499 |
|
219 | 219 | c.exception_id_url = request.route_url( |
|
220 | 220 | 'admin_settings_exception_tracker_show', exception_id=c.exception_id) |
|
221 | 221 | |
|
222 | 222 | if c.show_exception_id: |
|
223 | 223 | store_exception(c.exception_id, exc_info) |
|
224 | 224 | c.exception_debug = str2bool(rhodecode.CONFIG.get('debug')) |
|
225 | 225 | c.exception_config_ini = rhodecode.CONFIG.get('__file__') |
|
226 | 226 | |
|
227 | 227 | response = render_to_response( |
|
228 | 228 | '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request, |
|
229 | 229 | response=base_response) |
|
230 | 230 | |
|
231 | 231 | return response |
|
232 | 232 | |
|
233 | 233 | |
|
234 | 234 | def includeme_first(config): |
|
235 | 235 | # redirect automatic browser favicon.ico requests to correct place |
|
236 | 236 | def favicon_redirect(context, request): |
|
237 | 237 | return HTTPFound( |
|
238 | 238 | request.static_path('rhodecode:public/images/favicon.ico')) |
|
239 | 239 | |
|
240 | 240 | config.add_view(favicon_redirect, route_name='favicon') |
|
241 | 241 | config.add_route('favicon', '/favicon.ico') |
|
242 | 242 | |
|
243 | 243 | def robots_redirect(context, request): |
|
244 | 244 | return HTTPFound( |
|
245 | 245 | request.static_path('rhodecode:public/robots.txt')) |
|
246 | 246 | |
|
247 | 247 | config.add_view(robots_redirect, route_name='robots') |
|
248 | 248 | config.add_route('robots', '/robots.txt') |
|
249 | 249 | |
|
250 | 250 | config.add_static_view( |
|
251 | 251 | '_static/deform', 'deform:static') |
|
252 | 252 | config.add_static_view( |
|
253 | 253 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) |
|
254 | 254 | |
|
255 | 255 | |
|
256 | 256 | def includeme(config, auth_resources=None): |
|
257 | 257 | from rhodecode.lib.celerylib.loader import configure_celery |
|
258 | 258 | log.debug('Initializing main includeme from %s', os.path.basename(__file__)) |
|
259 | 259 | settings = config.registry.settings |
|
260 | 260 | config.set_request_factory(Request) |
|
261 | 261 | |
|
262 | 262 | # plugin information |
|
263 | 263 | config.registry.rhodecode_plugins = collections.OrderedDict() |
|
264 | 264 | |
|
265 | 265 | config.add_directive( |
|
266 | 266 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
267 | 267 | |
|
268 | 268 | config.add_directive('configure_celery', configure_celery) |
|
269 | 269 | |
|
270 | 270 | if asbool(settings.get('appenlight', 'false')): |
|
271 | 271 | config.include('appenlight_client.ext.pyramid_tween') |
|
272 | 272 | |
|
273 | 273 | load_all = should_load_all() |
|
274 | 274 | |
|
275 | 275 | # Includes which are required. The application would fail without them. |
|
276 | 276 | config.include('pyramid_mako') |
|
277 | 277 | config.include('rhodecode.lib.rc_beaker') |
|
278 | 278 | config.include('rhodecode.lib.rc_cache') |
|
279 | 279 | config.include('rhodecode.apps._base.navigation') |
|
280 | 280 | config.include('rhodecode.apps._base.subscribers') |
|
281 | 281 | config.include('rhodecode.tweens') |
|
282 | 282 | config.include('rhodecode.authentication') |
|
283 | 283 | |
|
284 | 284 | if load_all: |
|
285 | 285 | ce_auth_resources = [ |
|
286 | 286 | 'rhodecode.authentication.plugins.auth_crowd', |
|
287 | 287 | 'rhodecode.authentication.plugins.auth_headers', |
|
288 | 288 | 'rhodecode.authentication.plugins.auth_jasig_cas', |
|
289 | 289 | 'rhodecode.authentication.plugins.auth_ldap', |
|
290 | 290 | 'rhodecode.authentication.plugins.auth_pam', |
|
291 | 291 | 'rhodecode.authentication.plugins.auth_rhodecode', |
|
292 | 292 | 'rhodecode.authentication.plugins.auth_token', |
|
293 | 293 | ] |
|
294 | 294 | |
|
295 | 295 | # load CE authentication plugins |
|
296 | 296 | |
|
297 | 297 | if auth_resources: |
|
298 | 298 | ce_auth_resources.extend(auth_resources) |
|
299 | 299 | |
|
300 | 300 | for resource in ce_auth_resources: |
|
301 | 301 | config.include(resource) |
|
302 | 302 | |
|
303 | 303 | # Auto discover authentication plugins and include their configuration. |
|
304 | 304 | if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')): |
|
305 | 305 | from rhodecode.authentication import discover_legacy_plugins |
|
306 | 306 | discover_legacy_plugins(config) |
|
307 | 307 | |
|
308 | 308 | # apps |
|
309 | 309 | if load_all: |
|
310 | 310 | config.include('rhodecode.api') |
|
311 | 311 | config.include('rhodecode.apps._base') |
|
312 | 312 | config.include('rhodecode.apps.hovercards') |
|
313 | 313 | config.include('rhodecode.apps.ops') |
|
314 | 314 | config.include('rhodecode.apps.channelstream') |
|
315 | 315 | config.include('rhodecode.apps.file_store') |
|
316 | 316 | config.include('rhodecode.apps.admin') |
|
317 | 317 | config.include('rhodecode.apps.login') |
|
318 | 318 | config.include('rhodecode.apps.home') |
|
319 | 319 | config.include('rhodecode.apps.journal') |
|
320 | 320 | |
|
321 | 321 | config.include('rhodecode.apps.repository') |
|
322 | 322 | config.include('rhodecode.apps.repo_group') |
|
323 | 323 | config.include('rhodecode.apps.user_group') |
|
324 | 324 | config.include('rhodecode.apps.search') |
|
325 | 325 | config.include('rhodecode.apps.user_profile') |
|
326 | 326 | config.include('rhodecode.apps.user_group_profile') |
|
327 | 327 | config.include('rhodecode.apps.my_account') |
|
328 | 328 | config.include('rhodecode.apps.gist') |
|
329 | 329 | |
|
330 | 330 | config.include('rhodecode.apps.svn_support') |
|
331 | 331 | config.include('rhodecode.apps.ssh_support') |
|
332 | 332 | config.include('rhodecode.apps.debug_style') |
|
333 | 333 | |
|
334 | 334 | if load_all: |
|
335 | 335 | config.include('rhodecode.integrations') |
|
336 | 336 | |
|
337 | 337 | config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True) |
|
338 | 338 | config.add_translation_dirs('rhodecode:i18n/') |
|
339 | 339 | settings['default_locale_name'] = settings.get('lang', 'en') |
|
340 | 340 | |
|
341 | 341 | # Add subscribers. |
|
342 | 342 | if load_all: |
|
343 | 343 | config.add_subscriber(scan_repositories_if_enabled, |
|
344 | 344 | pyramid.events.ApplicationCreated) |
|
345 | 345 | config.add_subscriber(write_metadata_if_needed, |
|
346 | 346 | pyramid.events.ApplicationCreated) |
|
347 | 347 | config.add_subscriber(write_usage_data, |
|
348 | 348 | pyramid.events.ApplicationCreated) |
|
349 | 349 | config.add_subscriber(write_js_routes_if_enabled, |
|
350 | 350 | pyramid.events.ApplicationCreated) |
|
351 | 351 | |
|
352 | 352 | # request custom methods |
|
353 | 353 | config.add_request_method( |
|
354 | 354 | 'rhodecode.lib.partial_renderer.get_partial_renderer', |
|
355 | 355 | 'get_partial_renderer') |
|
356 | 356 | |
|
357 | 357 | config.add_request_method( |
|
358 | 358 | 'rhodecode.lib.request_counter.get_request_counter', |
|
359 | 359 | 'request_count') |
|
360 | 360 | |
|
361 | 361 | # Set the authorization policy. |
|
362 | 362 | authz_policy = ACLAuthorizationPolicy() |
|
363 | 363 | config.set_authorization_policy(authz_policy) |
|
364 | 364 | |
|
365 | 365 | # Set the default renderer for HTML templates to mako. |
|
366 | 366 | config.add_mako_renderer('.html') |
|
367 | 367 | |
|
368 | 368 | config.add_renderer( |
|
369 | 369 | name='json_ext', |
|
370 | 370 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') |
|
371 | 371 | |
|
372 | 372 | config.add_renderer( |
|
373 | 373 | name='string_html', |
|
374 | 374 | factory='rhodecode.lib.string_renderer.html') |
|
375 | 375 | |
|
376 | 376 | # include RhodeCode plugins |
|
377 | 377 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
378 | 378 | for inc in includes: |
|
379 | 379 | config.include(inc) |
|
380 | 380 | |
|
381 | 381 | # custom not-found view; if our pyramid app doesn't know how to handle |
|
382 | 382 | # the request, pass it to the potential VCS handling app |
|
383 | 383 | config.add_notfound_view(not_found_view) |
|
384 | 384 | if not settings.get('debugtoolbar.enabled', False): |
|
385 | 385 | # with debugtoolbar disabled, handle all exceptions via the error_handler |
|
386 | 386 | config.add_view(error_handler, context=Exception) |
|
387 | 387 | |
|
388 | 388 | # all errors including 403/404/50X |
|
389 | 389 | config.add_view(error_handler, context=HTTPError) |
|
390 | 390 | |
|
391 | 391 | |
|
392 | 392 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
393 | 393 | """ |
|
394 | 394 | Apply outer WSGI middlewares around the application. |
|
395 | 395 | """ |
|
396 | 396 | registry = config.registry |
|
397 | 397 | settings = registry.settings |
|
398 | 398 | |
|
399 | 399 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
400 | 400 | pyramid_app = HttpsFixup(pyramid_app, settings) |
|
401 | 401 | |
|
402 | 402 | pyramid_app, _ae_client = wrap_in_appenlight_if_enabled( |
|
403 | 403 | pyramid_app, settings) |
|
404 | 404 | registry.ae_client = _ae_client |
|
405 | 405 | |
|
406 | 406 | if settings['gzip_responses']: |
|
407 | 407 | pyramid_app = make_gzip_middleware( |
|
408 | 408 | pyramid_app, settings, compress_level=1) |
|
409 | 409 | |
|
410 | 410 | # this should be the outermost middleware in the wsgi stack since |
|
411 | 411 | # middleware like Routes makes database calls |
|
412 | 412 | def pyramid_app_with_cleanup(environ, start_response): |
|
413 | 413 | try: |
|
414 | 414 | return pyramid_app(environ, start_response) |
|
415 | 415 | finally: |
|
416 | 416 | # Dispose current database session and rollback uncommitted |
|
417 | 417 | # transactions. |
|
418 | 418 | meta.Session.remove() |
|
419 | 419 | |
|
420 | 420 | # In a single-threaded server, on a non-sqlite db we should have |
|
421 | 421 | # '0 Current Checked out connections' at the end of a request, |
|
422 | 422 | # if not, then something, somewhere is leaving a connection open |
|
423 | 423 | pool = meta.Base.metadata.bind.engine.pool |
|
424 | 424 | log.debug('sa pool status: %s', pool.status()) |
|
425 | 425 | log.debug('Request processing finalized') |
|
426 | 426 | |
|
427 | 427 | return pyramid_app_with_cleanup |
|
428 | 428 | |
|
429 | 429 | |
|
430 | 430 | def sanitize_settings_and_apply_defaults(global_config, settings): |
|
431 | 431 | """ |
|
432 | 432 | Applies settings defaults and does all type conversion. |
|
433 | 433 | |
|
434 | 434 | We would move all settings parsing and preparation into this place, so that |
|
435 | 435 | we have only one place left which deals with this part. The remaining parts |
|
436 | 436 | of the application would start to rely fully on well prepared settings. |
|
437 | 437 | |
|
438 | 438 | This piece would later be split up per topic to avoid a big fat monster |
|
439 | 439 | function. |
|
440 | 440 | """ |
|
441 | 441 | |
|
442 | 442 | settings.setdefault('rhodecode.edition', 'Community Edition') |
|
443 | 443 | settings.setdefault('rhodecode.edition_id', 'CE') |
|
444 | 444 | |
|
445 | 445 | if 'mako.default_filters' not in settings: |
|
446 | 446 | # set custom default filters if they are not defined |
|
447 | 447 | settings['mako.imports'] = 'from rhodecode.lib.base import h_filter' |
|
448 | 448 | settings['mako.default_filters'] = 'h_filter' |
|
449 | 449 | |
|
450 | 450 | if 'mako.directories' not in settings: |
|
451 | 451 | mako_directories = settings.setdefault('mako.directories', [ |
|
452 | 452 | # Base templates of the original application |
|
453 | 453 | 'rhodecode:templates', |
|
454 | 454 | ]) |
|
455 | 455 | log.debug( |
|
456 | 456 | "Using the following Mako template directories: %s", |
|
457 | 457 | mako_directories) |
|
458 | 458 | |
|
459 | 459 | # NOTE(marcink): fix redis requirement for the connection URL scheme since 3.X |
|
460 | 460 | if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis': |
|
461 | 461 | raw_url = settings['beaker.session.url'] |
|
462 | 462 | if not raw_url.startswith(('redis://', 'rediss://', 'unix://')): |
|
463 | 463 | settings['beaker.session.url'] = 'redis://' + raw_url |
|
464 | 464 | |
|
465 | 465 | # Default includes, possible to change as a user |
|
466 | 466 | pyramid_includes = settings.setdefault('pyramid.includes', []) |
|
467 | 467 | log.debug( |
|
468 | 468 | "Using the following pyramid.includes: %s", |
|
469 | 469 | pyramid_includes) |
|
470 | 470 | |
|
471 | 471 | # TODO: johbo: Re-think this, usually the call to config.include |
|
472 | 472 | # should allow to pass in a prefix. |
|
473 | 473 | settings.setdefault('rhodecode.api.url', '/_admin/api') |
|
474 | 474 | settings.setdefault('__file__', global_config.get('__file__')) |
|
475 | 475 | |
|
476 | 476 | # Sanitize generic settings. |
|
477 | 477 | _list_setting(settings, 'default_encoding', 'UTF-8') |
|
478 | 478 | _bool_setting(settings, 'is_test', 'false') |
|
479 | 479 | _bool_setting(settings, 'gzip_responses', 'false') |
|
480 | 480 | |
|
481 | 481 | # Call split out functions that sanitize settings for each topic. |
|
482 | 482 | _sanitize_appenlight_settings(settings) |
|
483 | 483 | _sanitize_vcs_settings(settings) |
|
484 | 484 | _sanitize_cache_settings(settings) |
|
485 | 485 | |
|
486 | 486 | # configure instance id |
|
487 | 487 | config_utils.set_instance_id(settings) |
|
488 | 488 | |
|
489 | 489 | return settings |
|
490 | 490 | |
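As a concrete illustration of the beaker/redis URL normalization above, a legacy scheme-less value is simply prefixed with `redis://` (host and port are placeholders):

    settings = {
        'beaker.session.type': 'ext:redis',
        'beaker.session.url': 'localhost:6379/0',  # legacy scheme-less form
    }
    raw_url = settings['beaker.session.url']
    if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
        settings['beaker.session.url'] = 'redis://' + raw_url
    # settings['beaker.session.url'] == 'redis://localhost:6379/0'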
|
491 | 491 | |
|
492 | 492 | def enable_debug(): |
|
493 | 493 | """ |
|
494 | 494 | Helper to enable debug on a running instance |
|
495 | 495 | :return: |
|
496 | 496 | """ |
|
497 | 497 | import tempfile |
|
498 | 498 | import textwrap |
|
499 | 499 | import logging.config |
|
500 | 500 | |
|
501 | 501 | ini_template = textwrap.dedent(""" |
|
502 | 502 | ##################################### |
|
503 | 503 | ### DEBUG LOGGING CONFIGURATION #### |
|
504 | 504 | ##################################### |
|
505 | 505 | [loggers] |
|
506 | 506 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper |
|
507 | 507 | |
|
508 | 508 | [handlers] |
|
509 | 509 | keys = console, console_sql |
|
510 | 510 | |
|
511 | 511 | [formatters] |
|
512 | 512 | keys = generic, color_formatter, color_formatter_sql |
|
513 | 513 | |
|
514 | 514 | ############# |
|
515 | 515 | ## LOGGERS ## |
|
516 | 516 | ############# |
|
517 | 517 | [logger_root] |
|
518 | 518 | level = NOTSET |
|
519 | 519 | handlers = console |
|
520 | 520 | |
|
521 | 521 | [logger_sqlalchemy] |
|
522 | 522 | level = INFO |
|
523 | 523 | handlers = console_sql |
|
524 | 524 | qualname = sqlalchemy.engine |
|
525 | 525 | propagate = 0 |
|
526 | 526 | |
|
527 | 527 | [logger_beaker] |
|
528 | 528 | level = DEBUG |
|
529 | 529 | handlers = |
|
530 | 530 | qualname = beaker.container |
|
531 | 531 | propagate = 1 |
|
532 | 532 | |
|
533 | 533 | [logger_rhodecode] |
|
534 | 534 | level = DEBUG |
|
535 | 535 | handlers = |
|
536 | 536 | qualname = rhodecode |
|
537 | 537 | propagate = 1 |
|
538 | 538 | |
|
539 | 539 | [logger_ssh_wrapper] |
|
540 | 540 | level = DEBUG |
|
541 | 541 | handlers = |
|
542 | 542 | qualname = ssh_wrapper |
|
543 | 543 | propagate = 1 |
|
544 | 544 | |
|
545 | 545 | [logger_celery] |
|
546 | 546 | level = DEBUG |
|
547 | 547 | handlers = |
|
548 | 548 | qualname = celery |
|
549 | 549 | |
|
550 | 550 | |
|
551 | 551 | ############## |
|
552 | 552 | ## HANDLERS ## |
|
553 | 553 | ############## |
|
554 | 554 | |
|
555 | 555 | [handler_console] |
|
556 | 556 | class = StreamHandler |
|
557 | 557 | args = (sys.stderr, ) |
|
558 | 558 | level = DEBUG |
|
559 | 559 | formatter = color_formatter |
|
560 | 560 | |
|
561 | 561 | [handler_console_sql] |
|
562 | 562 | # "level = DEBUG" logs SQL queries and results. |
|
563 | 563 | # "level = INFO" logs SQL queries. |
|
564 | 564 | # "level = WARN" logs neither. (Recommended for production systems.) |
|
565 | 565 | class = StreamHandler |
|
566 | 566 | args = (sys.stderr, ) |
|
567 | 567 | level = WARN |
|
568 | 568 | formatter = color_formatter_sql |
|
569 | 569 | |
|
570 | 570 | ################ |
|
571 | 571 | ## FORMATTERS ## |
|
572 | 572 | ################ |
|
573 | 573 | |
|
574 | 574 | [formatter_generic] |
|
575 | 575 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
576 | 576 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s |
|
577 | 577 | datefmt = %Y-%m-%d %H:%M:%S |
|
578 | 578 | |
|
579 | 579 | [formatter_color_formatter] |
|
580 | 580 | class = rhodecode.lib.logging_formatter.ColorRequestTrackingFormatter |
|
581 | 581 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s |
|
582 | 582 | datefmt = %Y-%m-%d %H:%M:%S |
|
583 | 583 | |
|
584 | 584 | [formatter_color_formatter_sql] |
|
585 | 585 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
586 | 586 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
587 | 587 | datefmt = %Y-%m-%d %H:%M:%S |
|
588 | 588 | """) |
|
589 | 589 | |
|
590 | 590 | with tempfile.NamedTemporaryFile(prefix='rc_debug_logging_', suffix='.ini', |
|
591 | 591 | delete=False) as f: |
|
592 | 592 | log.info('Saved Temporary DEBUG config at %s', f.name) |
|
593 | 593 | f.write(ini_template) |
|
594 | 594 | |
|
595 | 595 | logging.config.fileConfig(f.name) |
|
596 | 596 | log.debug('DEBUG MODE ON') |
|
597 | 597 | os.remove(f.name) |
|
598 | 598 | |
|
599 | 599 | |
|
600 | 600 | def _sanitize_appenlight_settings(settings): |
|
601 | 601 | _bool_setting(settings, 'appenlight', 'false') |
|
602 | 602 | |
|
603 | 603 | |
|
604 | 604 | def _sanitize_vcs_settings(settings): |
|
605 | 605 | """ |
|
606 | 606 | Applies settings defaults and does type conversion for all VCS related |
|
607 | 607 | settings. |
|
608 | 608 | """ |
|
609 | 609 | _string_setting(settings, 'vcs.svn.compatible_version', '') |
|
610 | 610 | _string_setting(settings, 'vcs.hooks.protocol', 'http') |
|
611 | 611 | _string_setting(settings, 'vcs.hooks.host', '127.0.0.1') |
|
612 | 612 | _string_setting(settings, 'vcs.scm_app_implementation', 'http') |
|
613 | 613 | _string_setting(settings, 'vcs.server', '') |
|
614 | 614 | _string_setting(settings, 'vcs.server.protocol', 'http') |
|
615 | 615 | _bool_setting(settings, 'startup.import_repos', 'false') |
|
616 | 616 | _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') |
|
617 | 617 | _bool_setting(settings, 'vcs.server.enable', 'true') |
|
618 | 618 | _bool_setting(settings, 'vcs.start_server', 'false') |
|
619 | 619 | _list_setting(settings, 'vcs.backends', 'hg, git, svn') |
|
620 | 620 | _int_setting(settings, 'vcs.connection_timeout', 3600) |
|
621 | 621 | |
|
622 | 622 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
623 | 623 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or |
|
624 | 624 | # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'. |
|
625 | 625 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
626 | 626 | if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']: |
|
627 | 627 | settings['vcs.scm_app_implementation'] = 'http' |
|
628 | 628 | |
|
629 | 629 | |
|
630 | 630 | def _sanitize_cache_settings(settings): |
|
631 | 631 | temp_store = tempfile.gettempdir() |
|
632 | 632 | default_cache_dir = os.path.join(temp_store, 'rc_cache') |
|
633 | 633 | |
|
634 | 634 | # save default, cache dir, and use it for all backends later. |
|
635 | 635 | default_cache_dir = _string_setting( |
|
636 | 636 | settings, |
|
637 | 637 | 'cache_dir', |
|
638 | 638 | default_cache_dir, lower=False, default_when_empty=True) |
|
639 | 639 | |
|
640 | 640 | # ensure we have our dir created |
|
641 | 641 | if not os.path.isdir(default_cache_dir): |
|
642 | 642 | os.makedirs(default_cache_dir, mode=0o755) |
|
643 | 643 | |
|
644 | 644 | # exception store cache |
|
645 | 645 | _string_setting( |
|
646 | 646 | settings, |
|
647 | 647 | 'exception_tracker.store_path', |
|
648 | 648 | temp_store, lower=False, default_when_empty=True) |
|
649 | 649 | _bool_setting( |
|
650 | 650 | settings, |
|
651 | 651 | 'exception_tracker.send_email', |
|
652 | 652 | 'false') |
|
653 | 653 | _string_setting( |
|
654 | 654 | settings, |
|
655 | 655 | 'exception_tracker.email_prefix', |
|
656 | 656 | '[RHODECODE ERROR]', lower=False, default_when_empty=True) |
|
657 | 657 | |
|
658 | 658 | # cache_perms |
|
659 | 659 | _string_setting( |
|
660 | 660 | settings, |
|
661 | 661 | 'rc_cache.cache_perms.backend', |
|
662 | 662 | 'dogpile.cache.rc.file_namespace', lower=False) |
|
663 | 663 | _int_setting( |
|
664 | 664 | settings, |
|
665 | 665 | 'rc_cache.cache_perms.expiration_time', |
|
666 | 666 | 60) |
|
667 | 667 | _string_setting( |
|
668 | 668 | settings, |
|
669 | 669 | 'rc_cache.cache_perms.arguments.filename', |
|
670 | 670 | os.path.join(default_cache_dir, 'rc_cache_1'), lower=False) |
|
671 | 671 | |
|
672 | 672 | # cache_repo |
|
673 | 673 | _string_setting( |
|
674 | 674 | settings, |
|
675 | 675 | 'rc_cache.cache_repo.backend', |
|
676 | 676 | 'dogpile.cache.rc.file_namespace', lower=False) |
|
677 | 677 | _int_setting( |
|
678 | 678 | settings, |
|
679 | 679 | 'rc_cache.cache_repo.expiration_time', |
|
680 | 680 | 60) |
|
681 | 681 | _string_setting( |
|
682 | 682 | settings, |
|
683 | 683 | 'rc_cache.cache_repo.arguments.filename', |
|
684 | 684 | os.path.join(default_cache_dir, 'rc_cache_2'), lower=False) |
|
685 | 685 | |
|
686 | 686 | # cache_license |
|
687 | 687 | _string_setting( |
|
688 | 688 | settings, |
|
689 | 689 | 'rc_cache.cache_license.backend', |
|
690 | 690 | 'dogpile.cache.rc.file_namespace', lower=False) |
|
691 | 691 | _int_setting( |
|
692 | 692 | settings, |
|
693 | 693 | 'rc_cache.cache_license.expiration_time', |
|
694 | 694 | 5*60) |
|
695 | 695 | _string_setting( |
|
696 | 696 | settings, |
|
697 | 697 | 'rc_cache.cache_license.arguments.filename', |
|
698 | 698 | os.path.join(default_cache_dir, 'rc_cache_3'), lower=False) |
|
699 | 699 | |
|
700 | 700 | # cache_repo_longterm memory, 96H |
|
701 | 701 | _string_setting( |
|
702 | 702 | settings, |
|
703 | 703 | 'rc_cache.cache_repo_longterm.backend', |
|
704 | 704 | 'dogpile.cache.rc.memory_lru', lower=False) |
|
705 | 705 | _int_setting( |
|
706 | 706 | settings, |
|
707 | 707 | 'rc_cache.cache_repo_longterm.expiration_time', |
|
708 | 708 | 345600) |
|
709 | 709 | _int_setting( |
|
710 | 710 | settings, |
|
711 | 711 | 'rc_cache.cache_repo_longterm.max_size', |
|
712 | 712 | 10000) |
|
713 | 713 | |
|
714 | 714 | # sql_cache_short |
|
715 | 715 | _string_setting( |
|
716 | 716 | settings, |
|
717 | 717 | 'rc_cache.sql_cache_short.backend', |
|
718 | 718 | 'dogpile.cache.rc.memory_lru', lower=False) |
|
719 | 719 | _int_setting( |
|
720 | 720 | settings, |
|
721 | 721 | 'rc_cache.sql_cache_short.expiration_time', |
|
722 | 722 | 30) |
|
723 | 723 | _int_setting( |
|
724 | 724 | settings, |
|
725 | 725 | 'rc_cache.sql_cache_short.max_size', |
|
726 | 726 | 10000) |
|
727 | 727 | |
|
728 | 728 | |
|
729 | 729 | def _int_setting(settings, name, default): |
|
730 | 730 | settings[name] = int(settings.get(name, default)) |
|
731 | 731 | return settings[name] |
|
732 | 732 | |
|
733 | 733 | |
|
734 | 734 | def _bool_setting(settings, name, default): |
|
735 | 735 | input_val = settings.get(name, default) |
|
736 | 736 | if isinstance(input_val, unicode): |
|
737 | 737 | input_val = input_val.encode('utf8') |
|
738 | 738 | settings[name] = asbool(input_val) |
|
739 | 739 | return settings[name] |
|
740 | 740 | |
|
741 | 741 | |
|
742 | 742 | def _list_setting(settings, name, default): |
|
743 | 743 | raw_value = settings.get(name, default) |
|
744 | 744 | |
|
745 | 745 | old_separator = ',' |
|
746 | 746 | if old_separator in raw_value: |
|
747 | 747 | # If we get a comma separated list, pass it to our own function. |
|
748 | 748 | settings[name] = rhodecode_aslist(raw_value, sep=old_separator) |
|
749 | 749 | else: |
|
750 | 750 | # Otherwise we assume it uses pyramids space/newline separation. |
|
751 | 751 | settings[name] = aslist(raw_value) |
|
752 | 752 | return settings[name] |
|
753 | 753 | |
|
754 | 754 | |
|
755 | 755 | def _string_setting(settings, name, default, lower=True, default_when_empty=False): |
|
756 | 756 | value = settings.get(name, default) |
|
757 | 757 | |
|
758 | 758 | if default_when_empty and not value: |
|
759 | 759 | # use default value when value is empty |
|
760 | 760 | value = default |
|
761 | 761 | |
|
762 | 762 | if lower: |
|
763 | 763 | value = value.lower() |
|
764 | 764 | settings[name] = value |
|
765 | 765 | return settings[name] |
|
766 | 766 | |
|
767 | 767 | |
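A quick illustration of how these helpers coerce raw .ini strings in place; the keys reuse defaults seen earlier in this file, purely as an example:

    settings = {'vcs.backends': 'hg, git, svn', 'gzip_responses': 'true'}
    _list_setting(settings, 'vcs.backends', 'hg')
    _bool_setting(settings, 'gzip_responses', 'false')
    _int_setting(settings, 'vcs.connection_timeout', 3600)
    # settings == {'vcs.backends': ['hg', 'git', 'svn'],
    #              'gzip_responses': True,
    #              'vcs.connection_timeout': 3600}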
|
768 | 768 | def _substitute_values(mapping, substitutions): |
|
769 | 769 | result = {} |
|
770 | 770 | |
|
771 | 771 | try: |
|
772 | 772 | for key, value in mapping.items(): |
|
773 | 773 | # initialize without substitution first |
|
774 | 774 | result[key] = value |
|
775 | 775 | |
|
776 | 776 | # Note: Cannot use regular replacements, since they would clash |
|
777 | 777 | # with the implementation of ConfigParser. Using "format" instead. |
|
778 | 778 | try: |
|
779 | 779 | result[key] = value.format(**substitutions) |
|
780 | 780 | except KeyError as e: |
|
781 | 781 | env_var = '{}'.format(e.args[0]) |
|
782 | 782 | |
|
783 | 783 | msg = 'Failed to substitute: `{key}={{{var}}}` with environment entry. ' \ |
|
784 | 784 | 'Make sure your environment has {var} set, or remove this ' \ |
|
785 | 785 | 'variable from the config file'.format(key=key, var=env_var) |
|
786 | 786 | |
|
787 | 787 | if env_var.startswith('ENV_'): |
|
788 | 788 | raise ValueError(msg) |
|
789 | 789 | else: |
|
790 | 790 | log.warning(msg) |
|
791 | 791 | |
|
792 | 792 | except ValueError as e: |
|
793 | 793 | log.warning('Failed to substitute ENV variable: %s', e) |
|
794 | 794 | result = mapping |
|
795 | 795 | |
|
796 | 796 | return result |
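A small self-contained sketch of the substitution mechanism: `make_pyramid_app` builds a mapping of `ENV_*` keys from `os.environ`, and `_substitute_values` above expands format-style placeholders in config values against it (the variable name and URL are illustrative):

    import os

    os.environ['DB_URL'] = 'postgresql://localhost/rc'   # illustrative value
    substitutions = {'ENV_{}'.format(k): v for k, v in os.environ.items()}

    settings = {'sqlalchemy.db1.url': '{ENV_DB_URL}'}
    expanded = {k: v.format(**substitutions) for k, v in settings.items()}
    # expanded['sqlalchemy.db1.url'] == 'postgresql://localhost/rc'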
@@ -1,81 +1,81 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import socket |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
26 | 26 | from rhodecode.lib.celerylib.loader import ( |
|
27 | 27 | celery_app, RequestContextTask, get_logger) |
|
28 | 28 | from rhodecode.lib.statsd_client import StatsdClient |
|
29 | 29 | |
|
30 | 30 | async_task = celery_app.task |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | log = logging.getLogger(__name__) |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | class ResultWrapper(object): |
|
37 | 37 | def __init__(self, task): |
|
38 | 38 | self.task = task |
|
39 | 39 | |
|
40 | 40 | @LazyProperty |
|
41 | 41 | def result(self): |
|
42 | 42 | return self.task |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def run_task(task, *args, **kwargs): |
|
46 | 46 | log.debug('Got task `%s` for execution, celery mode enabled:%s', task, rhodecode.CELERY_ENABLED) |
|
47 | 47 | if task is None: |
|
48 | 48 | raise ValueError('Got non-existing task for execution') |
|
49 | 49 | |
|
50 | 50 | statsd = StatsdClient.statsd |
|
51 | 51 | exec_mode = 'sync' |
|
52 | 52 | |
|
53 | 53 | if rhodecode.CELERY_ENABLED: |
|
54 | 54 | |
|
55 | 55 | try: |
|
56 | 56 | t = task.apply_async(args=args, kwargs=kwargs) |
|
57 | 57 | log.debug('executing task %s:%s in async mode', t.task_id, task) |
|
58 | 58 | exec_mode = 'async' |
|
59 | 59 | return t |
|
60 | 60 | |
|
61 | 61 | except socket.error as e: |
|
62 | 62 | if isinstance(e, IOError) and e.errno == 111: |
|
63 | 63 | log.error('Unable to connect to celeryd `%s`. Sync execution', e) |
|
64 | 64 | else: |
|
65 | 65 | log.exception("Exception while connecting to celeryd.") |
|
66 | 66 | except KeyError as e: |
|
67 | 67 | log.error('Unable to connect to celeryd `%s`. Sync execution', e) |
|
68 | 68 | except Exception as e: |
|
69 | 69 | log.exception( |
|
70 | 70 | "Exception while trying to run task asynchronous. " |
|
71 | 71 | "Fallback to sync execution.") |
|
72 | 72 | |
|
73 | 73 | else: |
|
74 | 74 | log.debug('executing task %s:%s in sync mode', 'TASK', task) |
|
75 | 75 | |
|
76 | 76 | if statsd: |
|
77 | statsd.incr('rhodecode_celery_task', tags=[ | |
|
77 | statsd.incr('rhodecode_celery_task_total', tags=[ | |
|
78 | 78 | 'task:{}'.format(task), |
|
79 | 79 | 'mode:{}'.format(exec_mode) |
|
80 | 80 | ]) |
|
81 | 81 | return ResultWrapper(task(*args, **kwargs)) |
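A usage sketch for `run_task`, assuming this module is importable as `rhodecode.lib.celerylib` and using a hypothetical example task: when celery is enabled the task is queued and an `AsyncResult` comes back, otherwise it runs inline and is wrapped in `ResultWrapper`, so `.result` is available either way:

    from rhodecode.lib.celerylib import run_task, async_task

    @async_task
    def add_numbers(a, b):       # hypothetical example task
        return a + b

    result = run_task(add_numbers, 1, 2)
    print(result.result)         # 3 when celery is disabled (sync mode)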
@@ -1,310 +1,310 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | """ |
|
21 | 21 | Celery loader, run with:: |
|
22 | 22 | |
|
23 | 23 | celery worker \ |
|
24 | 24 | --beat \ |
|
25 | 25 | --app rhodecode.lib.celerylib.loader \ |
|
26 | 26 | --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \ |
|
27 | 27 | --loglevel DEBUG --ini=._dev/dev.ini |
|
28 | 28 | """ |
|
29 | 29 | import os |
|
30 | 30 | import logging |
|
31 | 31 | import importlib |
|
32 | 32 | |
|
33 | 33 | from celery import Celery |
|
34 | 34 | from celery import signals |
|
35 | 35 | from celery import Task |
|
36 | 36 | from celery import exceptions # pragma: no cover |
|
37 | 37 | from kombu.serialization import register |
|
38 | 38 | from pyramid.threadlocal import get_current_request |
|
39 | 39 | |
|
40 | 40 | import rhodecode |
|
41 | 41 | |
|
42 | 42 | from rhodecode.lib.auth import AuthUser |
|
43 | 43 | from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars, ping_db |
|
44 | 44 | from rhodecode.lib.ext_json import json |
|
45 | 45 | from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request |
|
46 | 46 | from rhodecode.lib.utils2 import str2bool |
|
47 | 47 | from rhodecode.model import meta |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | register('json_ext', json.dumps, json.loads, |
|
51 | 51 | content_type='application/x-json-ext', |
|
52 | 52 | content_encoding='utf-8') |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger('celery.rhodecode.loader') |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def add_preload_arguments(parser): |
|
58 | 58 | parser.add_argument( |
|
59 | 59 | '--ini', default=None, |
|
60 | 60 | help='Path to ini configuration file.' |
|
61 | 61 | ) |
|
62 | 62 | parser.add_argument( |
|
63 | 63 | '--ini-var', default=None, |
|
64 | 64 | help='Comma separated list of key=value to pass to ini.' |
|
65 | 65 | ) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | def get_logger(obj): |
|
69 | 69 | custom_log = logging.getLogger( |
|
70 | 70 | 'rhodecode.task.{}'.format(obj.__class__.__name__)) |
|
71 | 71 | |
|
72 | 72 | if rhodecode.CELERY_ENABLED: |
|
73 | 73 | try: |
|
74 | 74 | custom_log = obj.get_logger() |
|
75 | 75 | except Exception: |
|
76 | 76 | pass |
|
77 | 77 | |
|
78 | 78 | return custom_log |
|
79 | 79 | |
|
80 | 80 | |
|
81 | 81 | imports = ['rhodecode.lib.celerylib.tasks'] |
|
82 | 82 | |
|
83 | 83 | try: |
|
84 | 84 | # try if we have EE tasks available |
|
85 | 85 | importlib.import_module('rc_ee') |
|
86 | 86 | imports.append('rc_ee.lib.celerylib.tasks') |
|
87 | 87 | except ImportError: |
|
88 | 88 | pass |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | base_celery_config = { |
|
92 | 92 | 'result_backend': 'rpc://', |
|
93 | 93 | 'result_expires': 60 * 60 * 24, |
|
94 | 94 | 'result_persistent': True, |
|
95 | 95 | 'imports': imports, |
|
96 | 96 | 'worker_max_tasks_per_child': 100, |
|
97 | 97 | 'accept_content': ['json_ext'], |
|
98 | 98 | 'task_serializer': 'json_ext', |
|
99 | 99 | 'result_serializer': 'json_ext', |
|
100 | 100 | 'worker_hijack_root_logger': False, |
|
101 | 101 | 'database_table_names': { |
|
102 | 102 | 'task': 'beat_taskmeta', |
|
103 | 103 | 'group': 'beat_groupmeta', |
|
104 | 104 | } |
|
105 | 105 | } |
|
106 | 106 | # init main celery app |
|
107 | 107 | celery_app = Celery() |
|
108 | 108 | celery_app.user_options['preload'].add(add_preload_arguments) |
|
109 | 109 | ini_file_glob = None |
|
110 | 110 | |
|
111 | 111 | |
|
112 | 112 | @signals.setup_logging.connect |
|
113 | 113 | def setup_logging_callback(**kwargs): |
|
114 | 114 | setup_logging(ini_file_glob) |
|
115 | 115 | |
|
116 | 116 | |
|
117 | 117 | @signals.user_preload_options.connect |
|
118 | 118 | def on_preload_parsed(options, **kwargs): |
|
119 | 119 | ini_location = options['ini'] |
|
120 | 120 | ini_vars = options['ini_var'] |
|
121 | 121 | celery_app.conf['INI_PYRAMID'] = options['ini'] |
|
122 | 122 | |
|
123 | 123 | if ini_location is None: |
|
124 | 124 | print('You must provide the paste --ini argument') |
|
125 | 125 | exit(-1) |
|
126 | 126 | |
|
127 | 127 | options = None |
|
128 | 128 | if ini_vars is not None: |
|
129 | 129 | options = parse_ini_vars(ini_vars) |
|
130 | 130 | |
|
131 | 131 | global ini_file_glob |
|
132 | 132 | ini_file_glob = ini_location |
|
133 | 133 | |
|
134 | 134 | log.debug('Bootstrapping RhodeCode application...') |
|
135 | 135 | env = bootstrap(ini_location, options=options) |
|
136 | 136 | |
|
137 | 137 | setup_celery_app( |
|
138 | 138 | app=env['app'], root=env['root'], request=env['request'], |
|
139 | 139 | registry=env['registry'], closer=env['closer'], |
|
140 | 140 | ini_location=ini_location) |
|
141 | 141 | |
|
142 | 142 | # fix the global flag even if it's disabled via .ini file because this |
|
143 | 143 | # is worker code that doesn't need this to be disabled. |
|
144 | 144 | rhodecode.CELERY_ENABLED = True |
|
145 | 145 | |
|
146 | 146 | |
|
147 | 147 | @signals.task_prerun.connect |
|
148 | 148 | def task_prerun_signal(task_id, task, args, **kwargs): |
|
149 | 149 | ping_db() |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | @signals.task_success.connect |
|
153 | 153 | def task_success_signal(result, **kwargs): |
|
154 | 154 | meta.Session.commit() |
|
155 | 155 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
156 | 156 | if closer: |
|
157 | 157 | closer() |
|
158 | 158 | |
|
159 | 159 | |
|
160 | 160 | @signals.task_retry.connect |
|
161 | 161 | def task_retry_signal( |
|
162 | 162 | request, reason, einfo, **kwargs): |
|
163 | 163 | meta.Session.remove() |
|
164 | 164 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
165 | 165 | if closer: |
|
166 | 166 | closer() |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | @signals.task_failure.connect |
|
170 | 170 | def task_failure_signal( |
|
171 | 171 | task_id, exception, args, kwargs, traceback, einfo, **kargs): |
|
172 | 172 | from rhodecode.lib.exc_tracking import store_exception |
|
173 | 173 | from rhodecode.lib.statsd_client import StatsdClient |
|
174 | 174 | |
|
175 | 175 | meta.Session.remove() |
|
176 | 176 | |
|
177 | 177 | # simulate sys.exc_info() |
|
178 | 178 | exc_info = (einfo.type, einfo.exception, einfo.tb) |
|
179 | 179 | store_exception(id(exc_info), exc_info, prefix='rhodecode-celery') |
|
180 | 180 | statsd = StatsdClient.statsd |
|
181 | 181 | if statsd: |
|
182 | statsd.incr('rhodecode_exception', tags=["celery"]) | |
|
182 | statsd.incr('rhodecode_exception_total', tags=["exc_source:celery"]) | |
|
183 | 183 | |
|
184 | 184 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
185 | 185 | if closer: |
|
186 | 186 | closer() |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | @signals.task_revoked.connect |
|
190 | 190 | def task_revoked_signal( |
|
191 | 191 | request, terminated, signum, expired, **kwargs): |
|
192 | 192 | closer = celery_app.conf['PYRAMID_CLOSER'] |
|
193 | 193 | if closer: |
|
194 | 194 | closer() |
|
195 | 195 | |
|
196 | 196 | |
|
197 | 197 | def setup_celery_app(app, root, request, registry, closer, ini_location): |
|
198 | 198 | ini_dir = os.path.dirname(os.path.abspath(ini_location)) |
|
199 | 199 | celery_config = base_celery_config |
|
200 | 200 | celery_config.update({ |
|
201 | 201 | # store celerybeat scheduler db where the .ini file is |
|
202 | 202 | 'beat_schedule_filename': os.path.join(ini_dir, 'celerybeat-schedule'), |
|
203 | 203 | }) |
|
204 | 204 | ini_settings = get_ini_config(ini_location) |
|
205 | 205 | log.debug('Got custom celery conf: %s', ini_settings) |
|
206 | 206 | |
|
207 | 207 | celery_config.update(ini_settings) |
|
208 | 208 | celery_app.config_from_object(celery_config) |
|
209 | 209 | |
|
210 | 210 | celery_app.conf.update({'PYRAMID_APP': app}) |
|
211 | 211 | celery_app.conf.update({'PYRAMID_ROOT': root}) |
|
212 | 212 | celery_app.conf.update({'PYRAMID_REQUEST': request}) |
|
213 | 213 | celery_app.conf.update({'PYRAMID_REGISTRY': registry}) |
|
214 | 214 | celery_app.conf.update({'PYRAMID_CLOSER': closer}) |
|
215 | 215 | |
|
216 | 216 | |
|
217 | 217 | def configure_celery(config, ini_location): |
|
218 | 218 | """ |
|
219 | 219 | Helper that is called from our application creation logic. It provides |
|
220 | 220 | connection info to the running webapp and allows execution of tasks from |
|
221 | 221 | RhodeCode itself |
|
222 | 222 | """ |
|
223 | 223 | # store some globals into rhodecode |
|
224 | 224 | rhodecode.CELERY_ENABLED = str2bool( |
|
225 | 225 | config.registry.settings.get('use_celery')) |
|
226 | 226 | if rhodecode.CELERY_ENABLED: |
|
227 | 227 | log.info('Configuring celery based on `%s` file', ini_location) |
|
228 | 228 | setup_celery_app( |
|
229 | 229 | app=None, root=None, request=None, registry=config.registry, |
|
230 | 230 | closer=None, ini_location=ini_location) |
|
231 | 231 | |
|
232 | 232 | |
|
233 | 233 | def maybe_prepare_env(req): |
|
234 | 234 | environ = {} |
|
235 | 235 | try: |
|
236 | 236 | environ.update({ |
|
237 | 237 | 'PATH_INFO': req.environ['PATH_INFO'], |
|
238 | 238 | 'SCRIPT_NAME': req.environ['SCRIPT_NAME'], |
|
239 | 239 | 'HTTP_HOST': req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']), |
|
240 | 240 | 'SERVER_NAME': req.environ['SERVER_NAME'], |
|
241 | 241 | 'SERVER_PORT': req.environ['SERVER_PORT'], |
|
242 | 242 | 'wsgi.url_scheme': req.environ['wsgi.url_scheme'], |
|
243 | 243 | }) |
|
244 | 244 | except Exception: |
|
245 | 245 | pass |
|
246 | 246 | |
|
247 | 247 | return environ |
|
248 | 248 | |
|
249 | 249 | |
|
250 | 250 | class RequestContextTask(Task): |
|
251 | 251 | """ |
|
252 | 252 | This is a celery task which will create a rhodecode app instance context |
|
253 | 253 | for the task, patch pyramid with the original request |
|
254 | 254 | that created the task and also add the user to the context. |
|
255 | 255 | """ |
|
256 | 256 | |
|
257 | 257 | def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, |
|
258 | 258 | link=None, link_error=None, shadow=None, **options): |
|
259 | 259 | """ queue the job to run (we are in web request context here) """ |
|
260 | 260 | |
|
261 | 261 | req = get_current_request() |
|
262 | 262 | |
|
263 | 263 | # web case |
|
264 | 264 | if hasattr(req, 'user'): |
|
265 | 265 | ip_addr = req.user.ip_addr |
|
266 | 266 | user_id = req.user.user_id |
|
267 | 267 | |
|
268 | 268 | # api case |
|
269 | 269 | elif hasattr(req, 'rpc_user'): |
|
270 | 270 | ip_addr = req.rpc_user.ip_addr |
|
271 | 271 | user_id = req.rpc_user.user_id |
|
272 | 272 | else: |
|
273 | 273 | raise Exception( |
|
274 | 274 | 'Unable to fetch required data from request: {}. \n' |
|
275 | 275 | 'This task is required to be executed from the context of ' |
|
276 | 276 | 'a request in a webapp'.format(repr(req))) |
|
277 | 277 | |
|
278 | 278 | if req: |
|
279 | 279 | # we hook into kwargs since it is the only way to pass our data to |
|
280 | 280 | # the celery worker |
|
281 | 281 | environ = maybe_prepare_env(req) |
|
282 | 282 | options['headers'] = options.get('headers', {}) |
|
283 | 283 | options['headers'].update({ |
|
284 | 284 | 'rhodecode_proxy_data': { |
|
285 | 285 | 'environ': environ, |
|
286 | 286 | 'auth_user': { |
|
287 | 287 | 'ip_addr': ip_addr, |
|
288 | 288 | 'user_id': user_id |
|
289 | 289 | }, |
|
290 | 290 | } |
|
291 | 291 | }) |
|
292 | 292 | |
|
293 | 293 | return super(RequestContextTask, self).apply_async( |
|
294 | 294 | args, kwargs, task_id, producer, link, link_error, shadow, **options) |
|
295 | 295 | |
|
296 | 296 | def __call__(self, *args, **kwargs): |
|
297 | 297 | """ rebuild the context and then run task on celery worker """ |
|
298 | 298 | |
|
299 | 299 | proxy_data = getattr(self.request, 'rhodecode_proxy_data', None) |
|
300 | 300 | if not proxy_data: |
|
301 | 301 | return super(RequestContextTask, self).__call__(*args, **kwargs) |
|
302 | 302 | |
|
303 | 303 | log.debug('using celery proxy data to run task: %r', proxy_data) |
|
304 | 304 | # re-inject and register threadlocals for proper routing support |
|
305 | 305 | request = prepare_request(proxy_data['environ']) |
|
306 | 306 | request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'], |
|
307 | 307 | ip_addr=proxy_data['auth_user']['ip_addr']) |
|
308 | 308 | |
|
309 | 309 | return super(RequestContextTask, self).__call__(*args, **kwargs) |
|
310 | 310 |
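Note on the RequestContextTask change above: the class proxies the originating web/API request into the celery worker by packing a trimmed WSGI environ plus the acting user's id and IP into the message headers (`rhodecode_proxy_data`), and `__call__` rebuilds a pyramid request from that data before running the task body. The sketch below shows how such a task is typically declared and enqueued; the task name `audit_ping` is hypothetical, while `async_task`, `run_task` and `RequestContextTask` are the helpers imported elsewhere in this changeset.

```python
# Hypothetical sketch, not part of this changeset: a task that relies on the
# proxied request context restored by RequestContextTask.__call__ on the worker.
from rhodecode.lib.celerylib import async_task, run_task, RequestContextTask


@async_task(ignore_result=True, base=RequestContextTask)
def audit_ping(message):
    # by the time the body runs, the pyramid threadlocal request has been
    # re-created and request.user carries the original user_id/ip_addr
    return 'pong: {}'.format(message)


# inside a web or API view, apply_async() captures environ + auth user for us
run_task(audit_ping, 'hello')
```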
@@ -1,410 +1,414 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode task modules, containing all tasks that are supposed to be run |
|
23 | 23 | by the celery daemon |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import time |
|
28 | 28 | |
|
29 | 29 | from pyramid import compat |
|
30 | 30 | from pyramid_mailer.mailer import Mailer |
|
31 | 31 | from pyramid_mailer.message import Message |
|
32 | 32 | from email.utils import formatdate |
|
33 | 33 | |
|
34 | 34 | import rhodecode |
|
35 | 35 | from rhodecode.lib import audit_logger |
|
36 | 36 | from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task |
|
37 | 37 | from rhodecode.lib import hooks_base |
|
38 | 38 | from rhodecode.lib.utils2 import safe_int, str2bool, aslist |
|
39 | from rhodecode.lib.statsd_client import StatsdClient | |
|
39 | 40 | from rhodecode.model.db import ( |
|
40 | 41 | Session, IntegrityError, true, Repository, RepoGroup, User) |
|
41 | 42 | from rhodecode.model.permission import PermissionModel |
|
42 | 43 | |
|
43 | 44 | |
|
44 | 45 | @async_task(ignore_result=True, base=RequestContextTask) |
|
45 | 46 | def send_email(recipients, subject, body='', html_body='', email_config=None, |
|
46 | 47 | extra_headers=None): |
|
47 | 48 | """ |
|
48 | 49 | Sends an email with defined parameters from the .ini files. |
|
49 | 50 | |
|
50 | 51 | :param recipients: list of recipients; if this is empty, the defined email |
|
51 | 52 | address from the 'email_to' field is used instead |
|
52 | 53 | :param subject: subject of the mail |
|
53 | 54 | :param body: body of the mail |
|
54 | 55 | :param html_body: html version of body |
|
55 | 56 | :param email_config: specify custom configuration for mailer |
|
56 | 57 | :param extra_headers: specify custom headers |
|
57 | 58 | """ |
|
58 | 59 | log = get_logger(send_email) |
|
59 | 60 | |
|
60 | 61 | email_config = email_config or rhodecode.CONFIG |
|
61 | 62 | |
|
62 | 63 | mail_server = email_config.get('smtp_server') or None |
|
63 | 64 | if mail_server is None: |
|
64 | 65 | log.error("SMTP server information missing. Sending email failed. " |
|
65 | 66 | "Make sure that `smtp_server` variable is configured " |
|
66 | 67 | "inside the .ini file") |
|
67 | 68 | return False |
|
68 | 69 | |
|
69 | 70 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) |
|
70 | 71 | |
|
71 | 72 | if recipients: |
|
72 | 73 | if isinstance(recipients, compat.string_types): |
|
73 | 74 | recipients = recipients.split(',') |
|
74 | 75 | else: |
|
75 | 76 | # if recipients are not defined we send to email_config + all admins |
|
76 | 77 | admins = [] |
|
77 | 78 | for u in User.query().filter(User.admin == true()).all(): |
|
78 | 79 | if u.email: |
|
79 | 80 | admins.append(u.email) |
|
80 | 81 | recipients = [] |
|
81 | 82 | config_email = email_config.get('email_to') |
|
82 | 83 | if config_email: |
|
83 | 84 | recipients += [config_email] |
|
84 | 85 | recipients += admins |
|
85 | 86 | |
|
86 | 87 | # translate our LEGACY config into the one that pyramid_mailer supports |
|
87 | 88 | email_conf = dict( |
|
88 | 89 | host=mail_server, |
|
89 | 90 | port=email_config.get('smtp_port', 25), |
|
90 | 91 | username=email_config.get('smtp_username'), |
|
91 | 92 | password=email_config.get('smtp_password'), |
|
92 | 93 | |
|
93 | 94 | tls=str2bool(email_config.get('smtp_use_tls')), |
|
94 | 95 | ssl=str2bool(email_config.get('smtp_use_ssl')), |
|
95 | 96 | |
|
96 | 97 | # SSL key file |
|
97 | 98 | # keyfile='', |
|
98 | 99 | |
|
99 | 100 | # SSL certificate file |
|
100 | 101 | # certfile='', |
|
101 | 102 | |
|
102 | 103 | # Location of maildir |
|
103 | 104 | # queue_path='', |
|
104 | 105 | |
|
105 | 106 | default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'), |
|
106 | 107 | |
|
107 | 108 | debug=str2bool(email_config.get('smtp_debug')), |
|
108 | 109 | # /usr/sbin/sendmail Sendmail executable |
|
109 | 110 | # sendmail_app='', |
|
110 | 111 | |
|
111 | 112 | # {sendmail_app} -t -i -f {sender} Template for sendmail execution |
|
112 | 113 | # sendmail_template='', |
|
113 | 114 | ) |
|
114 | 115 | |
|
115 | 116 | if extra_headers is None: |
|
116 | 117 | extra_headers = {} |
|
117 | 118 | |
|
118 | 119 | extra_headers.setdefault('Date', formatdate(time.time())) |
|
119 | 120 | |
|
120 | 121 | if 'thread_ids' in extra_headers: |
|
121 | 122 | thread_ids = extra_headers.pop('thread_ids') |
|
122 | 123 | extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids) |
|
123 | 124 | |
|
124 | 125 | try: |
|
125 | 126 | mailer = Mailer(**email_conf) |
|
126 | 127 | |
|
127 | 128 | message = Message(subject=subject, |
|
128 | 129 | sender=email_conf['default_sender'], |
|
129 | 130 | recipients=recipients, |
|
130 | 131 | body=body, html=html_body, |
|
131 | 132 | extra_headers=extra_headers) |
|
132 | 133 | mailer.send_immediately(message) |
|
134 | statsd = StatsdClient.statsd | |
|
135 | if statsd: | |
|
136 | statsd.incr('rhodecode_email_sent_total') | |
|
133 | 137 | |
|
134 | 138 | except Exception: |
|
135 | 139 | log.exception('Mail sending failed') |
|
136 | 140 | return False |
|
137 | 141 | return True |
|
138 | 142 | |
|
139 | 143 | |
|
140 | 144 | @async_task(ignore_result=True, base=RequestContextTask) |
|
141 | 145 | def create_repo(form_data, cur_user): |
|
142 | 146 | from rhodecode.model.repo import RepoModel |
|
143 | 147 | from rhodecode.model.user import UserModel |
|
144 | 148 | from rhodecode.model.scm import ScmModel |
|
145 | 149 | from rhodecode.model.settings import SettingsModel |
|
146 | 150 | |
|
147 | 151 | log = get_logger(create_repo) |
|
148 | 152 | |
|
149 | 153 | cur_user = UserModel()._get_user(cur_user) |
|
150 | 154 | owner = cur_user |
|
151 | 155 | |
|
152 | 156 | repo_name = form_data['repo_name'] |
|
153 | 157 | repo_name_full = form_data['repo_name_full'] |
|
154 | 158 | repo_type = form_data['repo_type'] |
|
155 | 159 | description = form_data['repo_description'] |
|
156 | 160 | private = form_data['repo_private'] |
|
157 | 161 | clone_uri = form_data.get('clone_uri') |
|
158 | 162 | repo_group = safe_int(form_data['repo_group']) |
|
159 | 163 | copy_fork_permissions = form_data.get('copy_permissions') |
|
160 | 164 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
161 | 165 | fork_of = form_data.get('fork_parent_id') |
|
162 | 166 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
163 | 167 | |
|
164 | 168 | # repo creation defaults, private and repo_type are filled in form |
|
165 | 169 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
166 | 170 | enable_statistics = form_data.get( |
|
167 | 171 | 'enable_statistics', defs.get('repo_enable_statistics')) |
|
168 | 172 | enable_locking = form_data.get( |
|
169 | 173 | 'enable_locking', defs.get('repo_enable_locking')) |
|
170 | 174 | enable_downloads = form_data.get( |
|
171 | 175 | 'enable_downloads', defs.get('repo_enable_downloads')) |
|
172 | 176 | |
|
173 | 177 | # set landing rev based on default branches for SCM |
|
174 | 178 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) |
|
175 | 179 | |
|
176 | 180 | try: |
|
177 | 181 | RepoModel()._create_repo( |
|
178 | 182 | repo_name=repo_name_full, |
|
179 | 183 | repo_type=repo_type, |
|
180 | 184 | description=description, |
|
181 | 185 | owner=owner, |
|
182 | 186 | private=private, |
|
183 | 187 | clone_uri=clone_uri, |
|
184 | 188 | repo_group=repo_group, |
|
185 | 189 | landing_rev=landing_ref, |
|
186 | 190 | fork_of=fork_of, |
|
187 | 191 | copy_fork_permissions=copy_fork_permissions, |
|
188 | 192 | copy_group_permissions=copy_group_permissions, |
|
189 | 193 | enable_statistics=enable_statistics, |
|
190 | 194 | enable_locking=enable_locking, |
|
191 | 195 | enable_downloads=enable_downloads, |
|
192 | 196 | state=state |
|
193 | 197 | ) |
|
194 | 198 | Session().commit() |
|
195 | 199 | |
|
196 | 200 | # now create this repo on Filesystem |
|
197 | 201 | RepoModel()._create_filesystem_repo( |
|
198 | 202 | repo_name=repo_name, |
|
199 | 203 | repo_type=repo_type, |
|
200 | 204 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
201 | 205 | clone_uri=clone_uri, |
|
202 | 206 | ) |
|
203 | 207 | repo = Repository.get_by_repo_name(repo_name_full) |
|
204 | 208 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) |
|
205 | 209 | |
|
206 | 210 | # update repo commit caches initially |
|
207 | 211 | repo.update_commit_cache() |
|
208 | 212 | |
|
209 | 213 | # set new created state |
|
210 | 214 | repo.set_state(Repository.STATE_CREATED) |
|
211 | 215 | repo_id = repo.repo_id |
|
212 | 216 | repo_data = repo.get_api_data() |
|
213 | 217 | |
|
214 | 218 | audit_logger.store( |
|
215 | 219 | 'repo.create', action_data={'data': repo_data}, |
|
216 | 220 | user=cur_user, |
|
217 | 221 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
218 | 222 | |
|
219 | 223 | Session().commit() |
|
220 | 224 | |
|
221 | 225 | PermissionModel().trigger_permission_flush() |
|
222 | 226 | |
|
223 | 227 | except Exception as e: |
|
224 | 228 | log.warning('Exception occurred when creating repository, ' |
|
225 | 229 | 'doing cleanup...', exc_info=True) |
|
226 | 230 | if isinstance(e, IntegrityError): |
|
227 | 231 | Session().rollback() |
|
228 | 232 | |
|
229 | 233 | # rollback things manually ! |
|
230 | 234 | repo = Repository.get_by_repo_name(repo_name_full) |
|
231 | 235 | if repo: |
|
232 | 236 | Repository.delete(repo.repo_id) |
|
233 | 237 | Session().commit() |
|
234 | 238 | RepoModel()._delete_filesystem_repo(repo) |
|
235 | 239 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
236 | 240 | raise |
|
237 | 241 | |
|
238 | 242 | return True |
|
239 | 243 | |
|
240 | 244 | |
|
241 | 245 | @async_task(ignore_result=True, base=RequestContextTask) |
|
242 | 246 | def create_repo_fork(form_data, cur_user): |
|
243 | 247 | """ |
|
244 | 248 | Creates a fork of repository using internal VCS methods |
|
245 | 249 | """ |
|
246 | 250 | from rhodecode.model.repo import RepoModel |
|
247 | 251 | from rhodecode.model.user import UserModel |
|
248 | 252 | |
|
249 | 253 | log = get_logger(create_repo_fork) |
|
250 | 254 | |
|
251 | 255 | cur_user = UserModel()._get_user(cur_user) |
|
252 | 256 | owner = cur_user |
|
253 | 257 | |
|
254 | 258 | repo_name = form_data['repo_name'] # fork in this case |
|
255 | 259 | repo_name_full = form_data['repo_name_full'] |
|
256 | 260 | repo_type = form_data['repo_type'] |
|
257 | 261 | description = form_data['description'] |
|
258 | 262 | private = form_data['private'] |
|
259 | 263 | clone_uri = form_data.get('clone_uri') |
|
260 | 264 | repo_group = safe_int(form_data['repo_group']) |
|
261 | 265 | landing_ref = form_data['landing_rev'] |
|
262 | 266 | copy_fork_permissions = form_data.get('copy_permissions') |
|
263 | 267 | fork_id = safe_int(form_data.get('fork_parent_id')) |
|
264 | 268 | |
|
265 | 269 | try: |
|
266 | 270 | fork_of = RepoModel()._get_repo(fork_id) |
|
267 | 271 | RepoModel()._create_repo( |
|
268 | 272 | repo_name=repo_name_full, |
|
269 | 273 | repo_type=repo_type, |
|
270 | 274 | description=description, |
|
271 | 275 | owner=owner, |
|
272 | 276 | private=private, |
|
273 | 277 | clone_uri=clone_uri, |
|
274 | 278 | repo_group=repo_group, |
|
275 | 279 | landing_rev=landing_ref, |
|
276 | 280 | fork_of=fork_of, |
|
277 | 281 | copy_fork_permissions=copy_fork_permissions |
|
278 | 282 | ) |
|
279 | 283 | |
|
280 | 284 | Session().commit() |
|
281 | 285 | |
|
282 | 286 | base_path = Repository.base_path() |
|
283 | 287 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
284 | 288 | |
|
285 | 289 | # now create this repo on Filesystem |
|
286 | 290 | RepoModel()._create_filesystem_repo( |
|
287 | 291 | repo_name=repo_name, |
|
288 | 292 | repo_type=repo_type, |
|
289 | 293 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
290 | 294 | clone_uri=source_repo_path, |
|
291 | 295 | ) |
|
292 | 296 | repo = Repository.get_by_repo_name(repo_name_full) |
|
293 | 297 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) |
|
294 | 298 | |
|
295 | 299 | # update repo commit caches initially |
|
296 | 300 | config = repo._config |
|
297 | 301 | config.set('extensions', 'largefiles', '') |
|
298 | 302 | repo.update_commit_cache(config=config) |
|
299 | 303 | |
|
300 | 304 | # set new created state |
|
301 | 305 | repo.set_state(Repository.STATE_CREATED) |
|
302 | 306 | |
|
303 | 307 | repo_id = repo.repo_id |
|
304 | 308 | repo_data = repo.get_api_data() |
|
305 | 309 | audit_logger.store( |
|
306 | 310 | 'repo.fork', action_data={'data': repo_data}, |
|
307 | 311 | user=cur_user, |
|
308 | 312 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
309 | 313 | |
|
310 | 314 | Session().commit() |
|
311 | 315 | except Exception as e: |
|
312 | 316 | log.warning('Exception occurred when forking repository, ' |
|
313 | 317 | 'doing cleanup...', exc_info=True) |
|
314 | 318 | if isinstance(e, IntegrityError): |
|
315 | 319 | Session().rollback() |
|
316 | 320 | |
|
317 | 321 | # rollback things manually ! |
|
318 | 322 | repo = Repository.get_by_repo_name(repo_name_full) |
|
319 | 323 | if repo: |
|
320 | 324 | Repository.delete(repo.repo_id) |
|
321 | 325 | Session().commit() |
|
322 | 326 | RepoModel()._delete_filesystem_repo(repo) |
|
323 | 327 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
324 | 328 | raise |
|
325 | 329 | |
|
326 | 330 | return True |
|
327 | 331 | |
|
328 | 332 | |
|
329 | 333 | @async_task(ignore_result=True) |
|
330 | 334 | def repo_maintenance(repoid): |
|
331 | 335 | from rhodecode.lib import repo_maintenance as repo_maintenance_lib |
|
332 | 336 | log = get_logger(repo_maintenance) |
|
333 | 337 | repo = Repository.get_by_id_or_repo_name(repoid) |
|
334 | 338 | if repo: |
|
335 | 339 | maintenance = repo_maintenance_lib.RepoMaintenance() |
|
336 | 340 | tasks = maintenance.get_tasks_for_repo(repo) |
|
337 | 341 | log.debug('Executing %s tasks on repo `%s`', tasks, repoid) |
|
338 | 342 | executed_types = maintenance.execute(repo) |
|
339 | 343 | log.debug('Got execution results %s', executed_types) |
|
340 | 344 | else: |
|
341 | 345 | log.debug('Repo `%s` not found or without a clone_url', repoid) |
|
342 | 346 | |
|
343 | 347 | |
|
344 | 348 | @async_task(ignore_result=True) |
|
345 | 349 | def check_for_update(send_email_notification=True, email_recipients=None): |
|
346 | 350 | from rhodecode.model.update import UpdateModel |
|
347 | 351 | from rhodecode.model.notification import EmailNotificationModel |
|
348 | 352 | |
|
349 | 353 | log = get_logger(check_for_update) |
|
350 | 354 | update_url = UpdateModel().get_update_url() |
|
351 | 355 | cur_ver = rhodecode.__version__ |
|
352 | 356 | |
|
353 | 357 | try: |
|
354 | 358 | data = UpdateModel().get_update_data(update_url) |
|
355 | 359 | |
|
356 | 360 | current_ver = UpdateModel().get_stored_version(fallback=cur_ver) |
|
357 | 361 | latest_ver = data['versions'][0]['version'] |
|
358 | 362 | UpdateModel().store_version(latest_ver) |
|
359 | 363 | |
|
360 | 364 | if send_email_notification: |
|
361 | 365 | log.debug('Send email notification is enabled. ' |
|
362 | 366 | 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver) |
|
363 | 367 | if UpdateModel().is_outdated(current_ver, latest_ver): |
|
364 | 368 | |
|
365 | 369 | email_kwargs = { |
|
366 | 370 | 'current_ver': current_ver, |
|
367 | 371 | 'latest_ver': latest_ver, |
|
368 | 372 | } |
|
369 | 373 | |
|
370 | 374 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
371 | 375 | EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs) |
|
372 | 376 | |
|
373 | 377 | email_recipients = aslist(email_recipients, sep=',') or \ |
|
374 | 378 | [user.email for user in User.get_all_super_admins()] |
|
375 | 379 | run_task(send_email, email_recipients, subject, |
|
376 | 380 | email_body_plaintext, email_body) |
|
377 | 381 | |
|
378 | 382 | except Exception: |
|
379 | 383 | pass |
|
380 | 384 | |
|
381 | 385 | |
|
382 | 386 | @async_task(ignore_result=False) |
|
383 | 387 | def beat_check(*args, **kwargs): |
|
384 | 388 | log = get_logger(beat_check) |
|
385 | 389 | log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs) |
|
386 | 390 | return time.time() |
|
387 | 391 | |
|
388 | 392 | |
|
389 | 393 | def sync_last_update_for_objects(*args, **kwargs): |
|
390 | 394 | skip_repos = kwargs.get('skip_repos') |
|
391 | 395 | if not skip_repos: |
|
392 | 396 | repos = Repository.query() \ |
|
393 | 397 | .order_by(Repository.group_id.asc()) |
|
394 | 398 | |
|
395 | 399 | for repo in repos: |
|
396 | 400 | repo.update_commit_cache() |
|
397 | 401 | |
|
398 | 402 | skip_groups = kwargs.get('skip_groups') |
|
399 | 403 | if not skip_groups: |
|
400 | 404 | repo_groups = RepoGroup.query() \ |
|
401 | 405 | .filter(RepoGroup.group_parent_id == None) |
|
402 | 406 | |
|
403 | 407 | for root_gr in repo_groups: |
|
404 | 408 | for repo_gr in reversed(root_gr.recursive_groups()): |
|
405 | 409 | repo_gr.update_commit_cache() |
|
406 | 410 | |
|
407 | 411 | |
|
408 | 412 | @async_task(ignore_result=True) |
|
409 | 413 | def sync_last_update(*args, **kwargs): |
|
410 | 414 | sync_last_update_for_objects(*args, **kwargs) |
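The tasks in this file are dispatched either through celery or synchronously via `run_task`, as `check_for_update` does for `send_email` above. A hedged sketch of that call pattern follows; the recipient, subject and body values are placeholders, and the `rhodecode.lib.celerylib.tasks` module path is an assumption based on the imports shown in this changeset.

```python
# Sketch only: dispatching the send_email task defined above via run_task,
# mirroring the call made in check_for_update(). All values are placeholders
# and the tasks module path is assumed, not taken from this diff.
from rhodecode.lib.celerylib import run_task
from rhodecode.lib.celerylib.tasks import send_email

run_task(
    send_email,
    ['admin@example.com'],   # empty recipients fall back to email_to + super-admins
    'Test subject',
    'plain text body',       # body
    '<b>html body</b>',      # html_body
)
```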
@@ -1,535 +1,535 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2013-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Set of hooks run by RhodeCode Enterprise |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import logging |
|
28 | 28 | |
|
29 | 29 | import rhodecode |
|
30 | 30 | from rhodecode import events |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib import audit_logger |
|
33 | 33 | from rhodecode.lib.utils2 import safe_str |
|
34 | 34 | from rhodecode.lib.exceptions import ( |
|
35 | 35 | HTTPLockedRC, HTTPBranchProtected, UserCreationError) |
|
36 | 36 | from rhodecode.model.db import Repository, User |
|
37 | 37 | from rhodecode.lib.statsd_client import StatsdClient |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class HookResponse(object): |
|
43 | 43 | def __init__(self, status, output): |
|
44 | 44 | self.status = status |
|
45 | 45 | self.output = output |
|
46 | 46 | |
|
47 | 47 | def __add__(self, other): |
|
48 | 48 | other_status = getattr(other, 'status', 0) |
|
49 | 49 | new_status = max(self.status, other_status) |
|
50 | 50 | other_output = getattr(other, 'output', '') |
|
51 | 51 | new_output = self.output + other_output |
|
52 | 52 | |
|
53 | 53 | return HookResponse(new_status, new_output) |
|
54 | 54 | |
|
55 | 55 | def __bool__(self): |
|
56 | 56 | return self.status == 0 |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def is_shadow_repo(extras): |
|
60 | 60 | """ |
|
61 | 61 | Returns ``True`` if this is an action executed against a shadow repository. |
|
62 | 62 | """ |
|
63 | 63 | return extras['is_shadow_repo'] |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | def _get_scm_size(alias, root_path): |
|
67 | 67 | |
|
68 | 68 | if not alias.startswith('.'): |
|
69 | 69 | alias += '.' |
|
70 | 70 | |
|
71 | 71 | size_scm, size_root = 0, 0 |
|
72 | 72 | for path, unused_dirs, files in os.walk(safe_str(root_path)): |
|
73 | 73 | if path.find(alias) != -1: |
|
74 | 74 | for f in files: |
|
75 | 75 | try: |
|
76 | 76 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
77 | 77 | except OSError: |
|
78 | 78 | pass |
|
79 | 79 | else: |
|
80 | 80 | for f in files: |
|
81 | 81 | try: |
|
82 | 82 | size_root += os.path.getsize(os.path.join(path, f)) |
|
83 | 83 | except OSError: |
|
84 | 84 | pass |
|
85 | 85 | |
|
86 | 86 | size_scm_f = h.format_byte_size_binary(size_scm) |
|
87 | 87 | size_root_f = h.format_byte_size_binary(size_root) |
|
88 | 88 | size_total_f = h.format_byte_size_binary(size_root + size_scm) |
|
89 | 89 | |
|
90 | 90 | return size_scm_f, size_root_f, size_total_f |
|
91 | 91 | |
|
92 | 92 | |
|
93 | 93 | # actual hooks called by Mercurial internally, and GIT by our Python Hooks |
|
94 | 94 | def repo_size(extras): |
|
95 | 95 | """Present size of repository after push.""" |
|
96 | 96 | repo = Repository.get_by_repo_name(extras.repository) |
|
97 | 97 | vcs_part = safe_str(u'.%s' % repo.repo_type) |
|
98 | 98 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, |
|
99 | 99 | repo.repo_full_path) |
|
100 | 100 | msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n' |
|
101 | 101 | % (repo.repo_name, vcs_part, size_vcs, size_root, size_total)) |
|
102 | 102 | return HookResponse(0, msg) |
|
103 | 103 | |
|
104 | 104 | |
|
105 | 105 | def pre_push(extras): |
|
106 | 106 | """ |
|
107 | 107 | Hook executed before pushing code. |
|
108 | 108 | |
|
109 | 109 | It bans pushing when the repository is locked. |
|
110 | 110 | """ |
|
111 | 111 | |
|
112 | 112 | user = User.get_by_username(extras.username) |
|
113 | 113 | output = '' |
|
114 | 114 | if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): |
|
115 | 115 | locked_by = User.get(extras.locked_by[0]).username |
|
116 | 116 | reason = extras.locked_by[2] |
|
117 | 117 | # this exception is interpreted in git/hg middlewares and based |
|
118 | 118 | # on that proper return code is server to client |
|
119 | 119 | _http_ret = HTTPLockedRC( |
|
120 | 120 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
121 | 121 | if str(_http_ret.code).startswith('2'): |
|
122 | 122 | # 2xx Codes don't raise exceptions |
|
123 | 123 | output = _http_ret.title |
|
124 | 124 | else: |
|
125 | 125 | raise _http_ret |
|
126 | 126 | |
|
127 | 127 | hook_response = '' |
|
128 | 128 | if not is_shadow_repo(extras): |
|
129 | 129 | if extras.commit_ids and extras.check_branch_perms: |
|
130 | 130 | |
|
131 | 131 | auth_user = user.AuthUser() |
|
132 | 132 | repo = Repository.get_by_repo_name(extras.repository) |
|
133 | 133 | affected_branches = [] |
|
134 | 134 | if repo.repo_type == 'hg': |
|
135 | 135 | for entry in extras.commit_ids: |
|
136 | 136 | if entry['type'] == 'branch': |
|
137 | 137 | is_forced = bool(entry['multiple_heads']) |
|
138 | 138 | affected_branches.append([entry['name'], is_forced]) |
|
139 | 139 | elif repo.repo_type == 'git': |
|
140 | 140 | for entry in extras.commit_ids: |
|
141 | 141 | if entry['type'] == 'heads': |
|
142 | 142 | is_forced = bool(entry['pruned_sha']) |
|
143 | 143 | affected_branches.append([entry['name'], is_forced]) |
|
144 | 144 | |
|
145 | 145 | for branch_name, is_forced in affected_branches: |
|
146 | 146 | |
|
147 | 147 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
148 | 148 | extras.repository, branch_name) |
|
149 | 149 | if not branch_perm: |
|
150 | 150 | # no branch permission found for this branch, just keep checking |
|
151 | 151 | continue |
|
152 | 152 | |
|
153 | 153 | if branch_perm == 'branch.push_force': |
|
154 | 154 | continue |
|
155 | 155 | elif branch_perm == 'branch.push' and is_forced is False: |
|
156 | 156 | continue |
|
157 | 157 | elif branch_perm == 'branch.push' and is_forced is True: |
|
158 | 158 | halt_message = 'Branch `{}` changes rejected by rule {}. ' \ |
|
159 | 159 | 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule) |
|
160 | 160 | else: |
|
161 | 161 | halt_message = 'Branch `{}` changes rejected by rule {}.'.format( |
|
162 | 162 | branch_name, rule) |
|
163 | 163 | |
|
164 | 164 | if halt_message: |
|
165 | 165 | _http_ret = HTTPBranchProtected(halt_message) |
|
166 | 166 | raise _http_ret |
|
167 | 167 | |
|
168 | 168 | # Propagate to external components. This is done after checking the |
|
169 | 169 | # lock, for consistent behavior. |
|
170 | 170 | hook_response = pre_push_extension( |
|
171 | 171 | repo_store_path=Repository.base_path(), **extras) |
|
172 | 172 | events.trigger(events.RepoPrePushEvent( |
|
173 | 173 | repo_name=extras.repository, extras=extras)) |
|
174 | 174 | |
|
175 | 175 | return HookResponse(0, output) + hook_response |
|
176 | 176 | |
|
177 | 177 | |
|
178 | 178 | def pre_pull(extras): |
|
179 | 179 | """ |
|
180 | 180 | Hook executed before pulling the code. |
|
181 | 181 | |
|
182 | 182 | It bans pulling when the repository is locked. |
|
183 | 183 | """ |
|
184 | 184 | |
|
185 | 185 | output = '' |
|
186 | 186 | if extras.locked_by[0]: |
|
187 | 187 | locked_by = User.get(extras.locked_by[0]).username |
|
188 | 188 | reason = extras.locked_by[2] |
|
189 | 189 | # this exception is interpreted in git/hg middlewares and based |
|
190 | 190 | # on that proper return code is server to client |
|
191 | 191 | _http_ret = HTTPLockedRC( |
|
192 | 192 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
193 | 193 | if str(_http_ret.code).startswith('2'): |
|
194 | 194 | # 2xx Codes don't raise exceptions |
|
195 | 195 | output = _http_ret.title |
|
196 | 196 | else: |
|
197 | 197 | raise _http_ret |
|
198 | 198 | |
|
199 | 199 | # Propagate to external components. This is done after checking the |
|
200 | 200 | # lock, for consistent behavior. |
|
201 | 201 | hook_response = '' |
|
202 | 202 | if not is_shadow_repo(extras): |
|
203 | 203 | extras.hook_type = extras.hook_type or 'pre_pull' |
|
204 | 204 | hook_response = pre_pull_extension( |
|
205 | 205 | repo_store_path=Repository.base_path(), **extras) |
|
206 | 206 | events.trigger(events.RepoPrePullEvent( |
|
207 | 207 | repo_name=extras.repository, extras=extras)) |
|
208 | 208 | |
|
209 | 209 | return HookResponse(0, output) + hook_response |
|
210 | 210 | |
|
211 | 211 | |
|
212 | 212 | def post_pull(extras): |
|
213 | 213 | """Hook executed after client pulls the code.""" |
|
214 | 214 | |
|
215 | 215 | audit_user = audit_logger.UserWrap( |
|
216 | 216 | username=extras.username, |
|
217 | 217 | ip_addr=extras.ip) |
|
218 | 218 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
219 | 219 | audit_logger.store( |
|
220 | 220 | 'user.pull', action_data={'user_agent': extras.user_agent}, |
|
221 | 221 | user=audit_user, repo=repo, commit=True) |
|
222 | 222 | |
|
223 | 223 | statsd = StatsdClient.statsd |
|
224 | 224 | if statsd: |
|
225 | statsd.incr('rhodecode_pull') | |
|
225 | statsd.incr('rhodecode_pull_total') | |
|
226 | 226 | |
|
227 | 227 | output = '' |
|
228 | 228 | # make lock is a tri state False, True, None. We only make lock on True |
|
229 | 229 | if extras.make_lock is True and not is_shadow_repo(extras): |
|
230 | 230 | user = User.get_by_username(extras.username) |
|
231 | 231 | Repository.lock(Repository.get_by_repo_name(extras.repository), |
|
232 | 232 | user.user_id, |
|
233 | 233 | lock_reason=Repository.LOCK_PULL) |
|
234 | 234 | msg = 'Made lock on repo `%s`' % (extras.repository,) |
|
235 | 235 | output += msg |
|
236 | 236 | |
|
237 | 237 | if extras.locked_by[0]: |
|
238 | 238 | locked_by = User.get(extras.locked_by[0]).username |
|
239 | 239 | reason = extras.locked_by[2] |
|
240 | 240 | _http_ret = HTTPLockedRC( |
|
241 | 241 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
242 | 242 | if str(_http_ret.code).startswith('2'): |
|
243 | 243 | # 2xx Codes don't raise exceptions |
|
244 | 244 | output += _http_ret.title |
|
245 | 245 | |
|
246 | 246 | # Propagate to external components. |
|
247 | 247 | hook_response = '' |
|
248 | 248 | if not is_shadow_repo(extras): |
|
249 | 249 | extras.hook_type = extras.hook_type or 'post_pull' |
|
250 | 250 | hook_response = post_pull_extension( |
|
251 | 251 | repo_store_path=Repository.base_path(), **extras) |
|
252 | 252 | events.trigger(events.RepoPullEvent( |
|
253 | 253 | repo_name=extras.repository, extras=extras)) |
|
254 | 254 | |
|
255 | 255 | return HookResponse(0, output) + hook_response |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | def post_push(extras): |
|
259 | 259 | """Hook executed after user pushes to the repository.""" |
|
260 | 260 | commit_ids = extras.commit_ids |
|
261 | 261 | |
|
262 | 262 | # log the push call |
|
263 | 263 | audit_user = audit_logger.UserWrap( |
|
264 | 264 | username=extras.username, ip_addr=extras.ip) |
|
265 | 265 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
266 | 266 | audit_logger.store( |
|
267 | 267 | 'user.push', action_data={ |
|
268 | 268 | 'user_agent': extras.user_agent, |
|
269 | 269 | 'commit_ids': commit_ids[:400]}, |
|
270 | 270 | user=audit_user, repo=repo, commit=True) |
|
271 | 271 | |
|
272 | 272 | statsd = StatsdClient.statsd |
|
273 | 273 | if statsd: |
|
274 | statsd.incr('rhodecode_push') | |
|
274 | statsd.incr('rhodecode_push_total') | |
|
275 | 275 | |
|
276 | 276 | # Propagate to external components. |
|
277 | 277 | output = '' |
|
278 | 278 | # make lock is a tri state False, True, None. We only release lock on False |
|
279 | 279 | if extras.make_lock is False and not is_shadow_repo(extras): |
|
280 | 280 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) |
|
281 | 281 | msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository)) |
|
282 | 282 | output += msg |
|
283 | 283 | |
|
284 | 284 | if extras.locked_by[0]: |
|
285 | 285 | locked_by = User.get(extras.locked_by[0]).username |
|
286 | 286 | reason = extras.locked_by[2] |
|
287 | 287 | _http_ret = HTTPLockedRC( |
|
288 | 288 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
289 | 289 | # TODO: johbo: if not? |
|
290 | 290 | if str(_http_ret.code).startswith('2'): |
|
291 | 291 | # 2xx Codes don't raise exceptions |
|
292 | 292 | output += _http_ret.title |
|
293 | 293 | |
|
294 | 294 | if extras.new_refs: |
|
295 | 295 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( |
|
296 | 296 | safe_str(extras.server_url), safe_str(extras.repository)) |
|
297 | 297 | |
|
298 | 298 | for branch_name in extras.new_refs['branches']: |
|
299 | 299 | output += 'RhodeCode: open pull request link: {}\n'.format( |
|
300 | 300 | tmpl.format(ref_type='branch', ref_name=safe_str(branch_name))) |
|
301 | 301 | |
|
302 | 302 | for book_name in extras.new_refs['bookmarks']: |
|
303 | 303 | output += 'RhodeCode: open pull request link: {}\n'.format( |
|
304 | 304 | tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name))) |
|
305 | 305 | |
|
306 | 306 | hook_response = '' |
|
307 | 307 | if not is_shadow_repo(extras): |
|
308 | 308 | hook_response = post_push_extension( |
|
309 | 309 | repo_store_path=Repository.base_path(), |
|
310 | 310 | **extras) |
|
311 | 311 | events.trigger(events.RepoPushEvent( |
|
312 | 312 | repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) |
|
313 | 313 | |
|
314 | 314 | output += 'RhodeCode: push completed\n' |
|
315 | 315 | return HookResponse(0, output) + hook_response |
|
316 | 316 | |
|
317 | 317 | |
|
318 | 318 | def _locked_by_explanation(repo_name, user_name, reason): |
|
319 | 319 | message = ( |
|
320 | 320 | 'Repository `%s` locked by user `%s`. Reason:`%s`' |
|
321 | 321 | % (repo_name, user_name, reason)) |
|
322 | 322 | return message |
|
323 | 323 | |
|
324 | 324 | |
|
325 | 325 | def check_allowed_create_user(user_dict, created_by, **kwargs): |
|
326 | 326 | # pre create hooks |
|
327 | 327 | if pre_create_user.is_active(): |
|
328 | 328 | hook_result = pre_create_user(created_by=created_by, **user_dict) |
|
329 | 329 | allowed = hook_result.status == 0 |
|
330 | 330 | if not allowed: |
|
331 | 331 | reason = hook_result.output |
|
332 | 332 | raise UserCreationError(reason) |
|
333 | 333 | |
|
334 | 334 | |
|
335 | 335 | class ExtensionCallback(object): |
|
336 | 336 | """ |
|
337 | 337 | Forwards a given call to rcextensions, sanitizes keyword arguments. |
|
338 | 338 | |
|
339 | 339 | Does check if there is an extension active for that hook. If it is |
|
340 | 340 | there, it will forward all `kwargs_keys` keyword arguments to the |
|
341 | 341 | extension callback. |
|
342 | 342 | """ |
|
343 | 343 | |
|
344 | 344 | def __init__(self, hook_name, kwargs_keys): |
|
345 | 345 | self._hook_name = hook_name |
|
346 | 346 | self._kwargs_keys = set(kwargs_keys) |
|
347 | 347 | |
|
348 | 348 | def __call__(self, *args, **kwargs): |
|
349 | 349 | log.debug('Calling extension callback for `%s`', self._hook_name) |
|
350 | 350 | callback = self._get_callback() |
|
351 | 351 | if not callback: |
|
352 | 352 | log.debug('extension callback `%s` not found, skipping...', self._hook_name) |
|
353 | 353 | return |
|
354 | 354 | |
|
355 | 355 | kwargs_to_pass = {} |
|
356 | 356 | for key in self._kwargs_keys: |
|
357 | 357 | try: |
|
358 | 358 | kwargs_to_pass[key] = kwargs[key] |
|
359 | 359 | except KeyError: |
|
360 | 360 | log.error('Failed to fetch %s key from given kwargs. ' |
|
361 | 361 | 'Expected keys: %s', key, self._kwargs_keys) |
|
362 | 362 | raise |
|
363 | 363 | |
|
364 | 364 | # backward compat for removed api_key for old hooks. This was it works |
|
365 | 365 | # with older rcextensions that require api_key present |
|
366 | 366 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: |
|
367 | 367 | kwargs_to_pass['api_key'] = '_DEPRECATED_' |
|
368 | 368 | return callback(**kwargs_to_pass) |
|
369 | 369 | |
|
370 | 370 | def is_active(self): |
|
371 | 371 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) |
|
372 | 372 | |
|
373 | 373 | def _get_callback(self): |
|
374 | 374 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) |
|
375 | 375 | |
|
376 | 376 | |
|
377 | 377 | pre_pull_extension = ExtensionCallback( |
|
378 | 378 | hook_name='PRE_PULL_HOOK', |
|
379 | 379 | kwargs_keys=( |
|
380 | 380 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
381 | 381 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) |
|
382 | 382 | |
|
383 | 383 | |
|
384 | 384 | post_pull_extension = ExtensionCallback( |
|
385 | 385 | hook_name='PULL_HOOK', |
|
386 | 386 | kwargs_keys=( |
|
387 | 387 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
388 | 388 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) |
|
389 | 389 | |
|
390 | 390 | |
|
391 | 391 | pre_push_extension = ExtensionCallback( |
|
392 | 392 | hook_name='PRE_PUSH_HOOK', |
|
393 | 393 | kwargs_keys=( |
|
394 | 394 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
395 | 395 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) |
|
396 | 396 | |
|
397 | 397 | |
|
398 | 398 | post_push_extension = ExtensionCallback( |
|
399 | 399 | hook_name='PUSH_HOOK', |
|
400 | 400 | kwargs_keys=( |
|
401 | 401 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
402 | 402 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) |
|
403 | 403 | |
|
404 | 404 | |
|
405 | 405 | pre_create_user = ExtensionCallback( |
|
406 | 406 | hook_name='PRE_CREATE_USER_HOOK', |
|
407 | 407 | kwargs_keys=( |
|
408 | 408 | 'username', 'password', 'email', 'firstname', 'lastname', 'active', |
|
409 | 409 | 'admin', 'created_by')) |
|
410 | 410 | |
|
411 | 411 | |
|
412 | 412 | create_pull_request = ExtensionCallback( |
|
413 | 413 | hook_name='CREATE_PULL_REQUEST', |
|
414 | 414 | kwargs_keys=( |
|
415 | 415 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
416 | 416 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
417 | 417 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
418 | 418 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
419 | 419 | |
|
420 | 420 | |
|
421 | 421 | merge_pull_request = ExtensionCallback( |
|
422 | 422 | hook_name='MERGE_PULL_REQUEST', |
|
423 | 423 | kwargs_keys=( |
|
424 | 424 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
425 | 425 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
426 | 426 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
427 | 427 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
428 | 428 | |
|
429 | 429 | |
|
430 | 430 | close_pull_request = ExtensionCallback( |
|
431 | 431 | hook_name='CLOSE_PULL_REQUEST', |
|
432 | 432 | kwargs_keys=( |
|
433 | 433 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
434 | 434 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
435 | 435 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
436 | 436 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
437 | 437 | |
|
438 | 438 | |
|
439 | 439 | review_pull_request = ExtensionCallback( |
|
440 | 440 | hook_name='REVIEW_PULL_REQUEST', |
|
441 | 441 | kwargs_keys=( |
|
442 | 442 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
443 | 443 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
444 | 444 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
445 | 445 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
446 | 446 | |
|
447 | 447 | |
|
448 | 448 | comment_pull_request = ExtensionCallback( |
|
449 | 449 | hook_name='COMMENT_PULL_REQUEST', |
|
450 | 450 | kwargs_keys=( |
|
451 | 451 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
452 | 452 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
453 | 453 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
454 | 454 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
455 | 455 | |
|
456 | 456 | |
|
457 | 457 | comment_edit_pull_request = ExtensionCallback( |
|
458 | 458 | hook_name='COMMENT_EDIT_PULL_REQUEST', |
|
459 | 459 | kwargs_keys=( |
|
460 | 460 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
461 | 461 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
462 | 462 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
463 | 463 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
464 | 464 | |
|
465 | 465 | |
|
466 | 466 | update_pull_request = ExtensionCallback( |
|
467 | 467 | hook_name='UPDATE_PULL_REQUEST', |
|
468 | 468 | kwargs_keys=( |
|
469 | 469 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
470 | 470 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
471 | 471 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
472 | 472 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
473 | 473 | |
|
474 | 474 | |
|
475 | 475 | create_user = ExtensionCallback( |
|
476 | 476 | hook_name='CREATE_USER_HOOK', |
|
477 | 477 | kwargs_keys=( |
|
478 | 478 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
479 | 479 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
480 | 480 | 'ip_addresses', 'extern_type', 'extern_name', |
|
481 | 481 | 'email', 'api_keys', 'last_login', |
|
482 | 482 | 'full_name', 'active', 'password', 'emails', |
|
483 | 483 | 'inherit_default_permissions', 'created_by', 'created_on')) |
|
484 | 484 | |
|
485 | 485 | |
|
486 | 486 | delete_user = ExtensionCallback( |
|
487 | 487 | hook_name='DELETE_USER_HOOK', |
|
488 | 488 | kwargs_keys=( |
|
489 | 489 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
490 | 490 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
491 | 491 | 'ip_addresses', |
|
492 | 492 | 'email', 'last_login', |
|
493 | 493 | 'full_name', 'active', 'password', 'emails', |
|
494 | 494 | 'inherit_default_permissions', 'deleted_by')) |
|
495 | 495 | |
|
496 | 496 | |
|
497 | 497 | create_repository = ExtensionCallback( |
|
498 | 498 | hook_name='CREATE_REPO_HOOK', |
|
499 | 499 | kwargs_keys=( |
|
500 | 500 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
501 | 501 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
502 | 502 | 'clone_uri', 'fork_id', 'group_id', 'created_by')) |
|
503 | 503 | |
|
504 | 504 | |
|
505 | 505 | delete_repository = ExtensionCallback( |
|
506 | 506 | hook_name='DELETE_REPO_HOOK', |
|
507 | 507 | kwargs_keys=( |
|
508 | 508 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
509 | 509 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
510 | 510 | 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) |
|
511 | 511 | |
|
512 | 512 | |
|
513 | 513 | comment_commit_repository = ExtensionCallback( |
|
514 | 514 | hook_name='COMMENT_COMMIT_REPO_HOOK', |
|
515 | 515 | kwargs_keys=( |
|
516 | 516 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
517 | 517 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
518 | 518 | 'clone_uri', 'fork_id', 'group_id', |
|
519 | 519 | 'repository', 'created_by', 'comment', 'commit')) |
|
520 | 520 | |
|
521 | 521 | comment_edit_commit_repository = ExtensionCallback( |
|
522 | 522 | hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK', |
|
523 | 523 | kwargs_keys=( |
|
524 | 524 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
525 | 525 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
526 | 526 | 'clone_uri', 'fork_id', 'group_id', |
|
527 | 527 | 'repository', 'created_by', 'comment', 'commit')) |
|
528 | 528 | |
|
529 | 529 | |
|
530 | 530 | create_repository_group = ExtensionCallback( |
|
531 | 531 | hook_name='CREATE_REPO_GROUP_HOOK', |
|
532 | 532 | kwargs_keys=( |
|
533 | 533 | 'group_name', 'group_parent_id', 'group_description', |
|
534 | 534 | 'group_id', 'user_id', 'created_by', 'created_on', |
|
535 | 535 | 'enable_locking')) |
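The long block of `ExtensionCallback` instances above all follow the same contract: look up a callback attribute on `rhodecode.EXTENSIONS`, forward only the whitelisted keyword arguments, and silently no-op when no extension is registered. The following is a simplified, standalone illustration of that pattern, not code from RhodeCode itself; `FakeExtensions` and the class name `Callback` are purely hypothetical.

```python
# Simplified, standalone illustration of the ExtensionCallback pattern above;
# FakeExtensions stands in for rhodecode.EXTENSIONS and is purely hypothetical.
class FakeExtensions(object):
    @staticmethod
    def PUSH_HOOK(**kwargs):
        return 'pushed to {repository} by {username}'.format(**kwargs)


class Callback(object):
    def __init__(self, extensions, hook_name, kwargs_keys):
        self._extensions = extensions
        self._hook_name = hook_name
        self._kwargs_keys = set(kwargs_keys)

    def __call__(self, **kwargs):
        callback = getattr(self._extensions, self._hook_name, None)
        if not callback:
            return None  # no extension registered -> skip silently
        # forward only the whitelisted keys, mirroring ExtensionCallback
        return callback(**{key: kwargs[key] for key in self._kwargs_keys})


post_push = Callback(FakeExtensions, 'PUSH_HOOK', ('repository', 'username'))
print(post_push(repository='my-repo', username='alice', extra='is dropped'))
```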
@@ -1,90 +1,89 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.lib.auth import AuthUser |
|
26 | 26 | from rhodecode.lib.base import get_ip_addr, get_access_path, get_user_agent |
|
27 | 27 | from rhodecode.lib.utils2 import safe_str, get_current_rhodecode_user |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | log = logging.getLogger(__name__) |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | class RequestWrapperTween(object): |
|
34 | 34 | def __init__(self, handler, registry): |
|
35 | 35 | self.handler = handler |
|
36 | 36 | self.registry = registry |
|
37 | 37 | |
|
38 | 38 | # one-time configuration code goes here |
|
39 | 39 | |
|
40 | 40 | def _get_user_info(self, request): |
|
41 | 41 | user = get_current_rhodecode_user(request) |
|
42 | 42 | if not user: |
|
43 | 43 | user = AuthUser.repr_user(ip=get_ip_addr(request.environ)) |
|
44 | 44 | return user |
|
45 | 45 | |
|
46 | 46 | def __call__(self, request): |
|
47 | 47 | start = time.time() |
|
48 | 48 | log.debug('Starting request time measurement') |
|
49 | 49 | try: |
|
50 | 50 | response = self.handler(request) |
|
51 | 51 | finally: |
|
52 | 52 | count = request.request_count() |
|
53 | 53 | _ver_ = rhodecode.__version__ |
|
54 | 54 | _path = safe_str(get_access_path(request.environ)) |
|
55 | 55 | _auth_user = self._get_user_info(request) |
|
56 | ||
|
56 | user_id = getattr(_auth_user, 'user_id', _auth_user) | |
|
57 | 57 | total = time.time() - start |
|
58 | 58 | log.info( |
|
59 | 59 | 'Req[%4s] %s %s Request to %s time: %.4fs [%s], RhodeCode %s', |
|
60 | 60 | count, _auth_user, request.environ.get('REQUEST_METHOD'), |
|
61 | 61 | _path, total, get_user_agent(request.environ), _ver_ |
|
62 | 62 | ) |
|
63 | 63 | |
|
64 | 64 | statsd = request.registry.statsd |
|
65 | 65 | if statsd: |
|
66 | 66 | resp_code = response.status_code |
|
67 | user_id = getattr(_auth_user, 'user_id', _auth_user) | |
|
68 | 67 | elapsed_time_ms = 1000.0 * total |
|
69 | 68 | statsd.timing( |
|
70 | 69 | 'rhodecode_req_timing', elapsed_time_ms, |
|
71 | 70 | tags=[ |
|
72 |
|
|
|
71 | "view_name:{}".format(request.matched_route.name), | |
|
73 | 72 | #"user:{}".format(user_id), |
|
74 | 73 | "code:{}".format(resp_code) |
|
75 | 74 | ] |
|
76 | 75 | ) |
|
77 | 76 | statsd.incr( |
|
78 | 'rhodecode_req_count', tags=[ | |
|
79 |
|
|
|
77 | 'rhodecode_req_count_total', tags=[ | |
|
78 | "view_name:{}".format(request.matched_route.name), | |
|
80 | 79 | #"user:{}".format(user_id), |
|
81 | 80 | "code:{}".format(resp_code) |
|
82 | 81 | ]) |
|
83 | 82 | |
|
84 | 83 | return response |
|
85 | 84 | |
|
86 | 85 | |
|
87 | 86 | def includeme(config): |
|
88 | 87 | config.add_tween( |
|
89 | 88 | 'rhodecode.lib.middleware.request_wrapper.RequestWrapperTween', |
|
90 | 89 | ) |
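The statsd changes in this file (and in the hooks earlier in this changeset) follow one convention: counter names gain a `_total` suffix and tags are `key:value` strings, for example `rhodecode_req_count` becomes `rhodecode_req_count_total`. The helper below is an illustrative sketch of the resulting emission, not code from the changeset; `statsd` stands for whatever client `request.registry.statsd` or `StatsdClient.statsd` provides, and `emit_request_metrics` is a hypothetical name.

```python
# Illustrative helper (not part of the changeset) showing the metric names and
# tag format this diff converges on; `statsd` is any client exposing the
# timing()/incr() calls already used above.
def emit_request_metrics(statsd, route_name, status_code, elapsed_seconds):
    if not statsd:
        return  # metrics are optional; a missing client disables emission
    tags = [
        "view_name:{}".format(route_name),
        "code:{}".format(status_code),
    ]
    statsd.timing('rhodecode_req_timing', 1000.0 * elapsed_seconds, tags=tags)
    statsd.incr('rhodecode_req_count_total', tags=tags)
```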