Show More
The requested changes are too big and content was truncated. Show full diff
@@ -1,543 +1,542 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import inspect |
|
22 | 22 | import itertools |
|
23 | 23 | import logging |
|
24 | 24 | import types |
|
25 | 25 | import fnmatch |
|
26 | 26 | |
|
27 | 27 | import decorator |
|
28 | 28 | import venusian |
|
29 | 29 | from collections import OrderedDict |
|
30 | 30 | |
|
31 | 31 | from pyramid.exceptions import ConfigurationError |
|
32 | 32 | from pyramid.renderers import render |
|
33 | 33 | from pyramid.response import Response |
|
34 | 34 | from pyramid.httpexceptions import HTTPNotFound |
|
35 | 35 | |
|
36 | 36 | from rhodecode.api.exc import ( |
|
37 | 37 | JSONRPCBaseError, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
38 | 38 | from rhodecode.apps._base import TemplateArgs |
|
39 | 39 | from rhodecode.lib.auth import AuthUser |
|
40 | 40 | from rhodecode.lib.base import get_ip_addr, attach_context_attributes |
|
41 | 41 | from rhodecode.lib.ext_json import json |
|
42 | 42 | from rhodecode.lib.utils2 import safe_str |
|
43 | 43 | from rhodecode.lib.plugins.utils import get_plugin_settings |
|
44 | 44 | from rhodecode.model.db import User, UserApiKeys |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | DEFAULT_RENDERER = 'jsonrpc_renderer' |
|
49 | 49 | DEFAULT_URL = '/_admin/apiv2' |
|
50 | 50 | |
|
51 | 51 | |
|
def find_methods(jsonrpc_methods, pattern):
    """
    Return an ordered mapping of registered JSON-RPC methods whose names
    match *pattern* — a single fnmatch-style glob, or a list/tuple of globs.

    :param jsonrpc_methods: mapping of method name -> view callable
    :param pattern: glob string or list/tuple of glob strings
    :returns: OrderedDict of matching name -> callable pairs
    """
    if not isinstance(pattern, (list, tuple)):
        pattern = [pattern]

    matches = OrderedDict()
    for glob in pattern:
        for name, func in jsonrpc_methods.items():
            if fnmatch.fnmatch(name, glob):
                matches[name] = func
    return matches
|
62 | 62 | |
|
63 | 63 | |
|
class ExtJsonRenderer(object):
    """
    Custom pyramid renderer that makes use of our ext_json lib
    """

    def __init__(self, serializer=json.dumps, **kw):
        """
        :param serializer: callable turning a value into JSON text.
        Any keyword arguments will be passed to the ``serializer``
        function on every render call.
        """
        self.serializer = serializer
        self.kw = kw

    def __call__(self, info):
        """ Returns a plain JSON-encoded string with content-type
        ``application/json``. The content-type may be overridden by
        setting ``request.response.content_type``."""
        # `info` is required by pyramid's renderer-factory protocol but
        # is not used here.

        def _render(value, system):
            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # only force our content type if the app did not override it
                if ct == response.default_content_type:
                    response.content_type = 'application/json'

            return self.serializer(value, **self.kw)

        return _render
|
92 | 92 | |
|
93 | 93 | |
|
def jsonrpc_response(request, result):
    """
    Build the HTTP response for a successful JSON-RPC call, wrapping
    *result* in the standard ``{id, result, error}`` envelope.
    """
    rpc_id = getattr(request, 'rpc_id', None)
    response = request.response

    # remember the content type as it was before render() runs
    ct = response.content_type

    ret_value = ''
    if rpc_id:
        ret_value = {
            'id': rpc_id,
            'result': result,
            'error': None,
        }

        # fetch deprecation warnings, and store it inside results
        deprecation = getattr(request, 'rpc_deprecation', None)
        if deprecation:
            ret_value['DEPRECATION_WARNING'] = deprecation

    raw_body = render(DEFAULT_RENDERER, ret_value, request=request)
    response.body = safe_str(raw_body, response.charset)

    # only force application/json when nothing customized the content type
    if ct == response.default_content_type:
        response.content_type = 'application/json'

    return response
|
121 | 121 | |
|
122 | 122 | |
|
def jsonrpc_error(request, message, retid=None, code=None):
    """
    Generate a Response object with a JSON-RPC error body.

    :param request: current request, handed to the renderer
    :param message: payload placed under the ``error`` key
    :param retid: JSON-RPC id echoed back to the caller
    :param code: optional HTTP status code for the response
    """
    err_dict = {'id': retid, 'result': None, 'error': message}
    body = render(DEFAULT_RENDERER, err_dict, request=request).encode('utf-8')
    return Response(
        body=body,
        status=code,
        content_type='application/json',
    )
|
138 | 138 | |
|
139 | 139 | |
|
def exception_view(exc, request):
    """
    Translate exceptions raised while handling an API call into
    JSON-RPC error responses.
    """
    rpc_id = getattr(request, 'rpc_id', None)

    fault_message = 'undefined error'
    if isinstance(exc, JSONRPCError):
        fault_message = exc.message
        log.debug('json-rpc error rpc_id:%s "%s"', rpc_id, fault_message)
    elif isinstance(exc, JSONRPCValidationError):
        colander_exc = exc.colander_exception
        # TODO(marcink): think maybe of nicer way to serialize errors ?
        fault_message = colander_exc.asdict()
        log.debug('json-rpc error rpc_id:%s "%s"', rpc_id, fault_message)
    elif isinstance(exc, JSONRPCForbidden):
        fault_message = 'Access was denied to this resource.'
        log.warning('json-rpc forbidden call rpc_id:%s "%s"', rpc_id, fault_message)
    elif isinstance(exc, HTTPNotFound):
        method = request.rpc_method
        log.debug('json-rpc method `%s` not found in list of '
                  'api calls: %s, rpc_id:%s',
                  method, request.registry.jsonrpc_methods.keys(), rpc_id)

        # best-effort suggestion of similarly named methods
        similar = 'none'
        try:
            similar_patterns = ['*{}*'.format(x) for x in method.split('_')]
            found = find_methods(
                request.registry.jsonrpc_methods, similar_patterns)
            similar = ', '.join(found.keys()) or similar
        except Exception:
            # make the whole above block safe
            pass

        fault_message = "No such method: {}. Similar methods: {}".format(
            method, similar)

    return jsonrpc_error(request, fault_message, rpc_id)
|
175 | 175 | |
|
176 | 176 | |
|
def request_view(request):
    """
    Main request handling method. It handles all logic to call a specific
    exposed method.

    Authenticates the caller by API token (checking active state, IP
    whitelist and token role), validates the target method's signature
    against the supplied JSON args, then dispatches the call.
    """

    # check if we can find this session using api_key, get_by_auth_token
    # search not expired tokens only
    try:
        api_user = User.get_by_auth_token(request.rpc_api_key)

        if api_user is None:
            return jsonrpc_error(
                request, retid=request.rpc_id, message='Invalid API KEY')

        if not api_user.active:
            return jsonrpc_error(
                request, retid=request.rpc_id,
                message='Request from this user not allowed')

        # check if we are allowed to use this IP
        auth_u = AuthUser(
            api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr)
        if not auth_u.ip_allowed:
            return jsonrpc_error(
                request, retid=request.rpc_id,
                message='Request from IP:%s not allowed' % (
                    request.rpc_ip_addr,))
        log.info('Access for IP:%s allowed', request.rpc_ip_addr)

        # register our auth-user
        request.rpc_user = auth_u
        request.environ['rc_auth_user_id'] = auth_u.user_id

        # now check if token is valid for API with the proper role
        log.debug('Checking if API KEY is valid with proper role')
        token_match = api_user.authenticate_by_token(
            request.rpc_api_key, roles=[UserApiKeys.ROLE_API])
        if not token_match:
            return jsonrpc_error(
                request, retid=request.rpc_id,
                message='API KEY invalid or, has bad role for an API call')

    except Exception:
        log.exception('Error on API AUTH')
        return jsonrpc_error(
            request, retid=request.rpc_id, message='Invalid API KEY')

    method = request.rpc_method
    func = request.registry.jsonrpc_methods[method]

    # introspect the exposed function's signature so we can validate the
    # JSON params against it
    argspec = inspect.getargspec(func)
    arglist = argspec[0]
    defaults = map(type, argspec[3] or [])
    default_empty = types.NotImplementedType

    # kw arguments required by this method; args without a default get
    # default_empty as a "required" sentinel
    expected_kwargs = dict(itertools.izip_longest(
        reversed(arglist), reversed(defaults), fillvalue=default_empty))

    # This attribute will need to be first param of a method that uses
    # api_key, which is translated to instance of user at that name
    user_var = 'apiuser'
    request_var = 'request'

    for arg in [user_var, request_var]:
        if arg not in arglist:
            return jsonrpc_error(
                request,
                retid=request.rpc_id,
                message='This method [%s] does not support '
                        'required parameter `%s`' % (func.__name__, arg))

    # verify that every non-optional argument was provided in the JSON data
    for arg, default in expected_kwargs.items():
        if arg in [user_var, request_var]:
            # user_var and request_var are pre-hardcoded parameters and we
            # don't need to do any translation
            continue

        # skip the required param check if it's default value is
        # NotImplementedType (default_empty)
        if default == default_empty and arg not in request.rpc_params:
            return jsonrpc_error(
                request,
                retid=request.rpc_id,
                message=('Missing non optional `%s` arg in JSON DATA' % arg)
            )

    # sanitize extra passed arguments not present in the signature
    for k in request.rpc_params.keys()[:]:
        if k not in expected_kwargs:
            del request.rpc_params[k]

    call_params = request.rpc_params
    call_params.update({
        'request': request,
        'apiuser': auth_u
    })

    # register some common functions for usage
    attach_context_attributes(
        TemplateArgs(), request, request.rpc_user.user_id)

    try:
        ret_value = func(**call_params)
        return jsonrpc_response(request, ret_value)
    except JSONRPCBaseError:
        # JSON-RPC errors are rendered by the registered exception_view
        raise
    except Exception:
        log.exception('Unhandled exception occurred on api call: %s', func)
        return jsonrpc_error(request, retid=request.rpc_id,
                             message='Internal server error')
|
297 | 297 | |
|
298 | 298 | |
|
def setup_request(request):
    """
    Parse a JSON-RPC request body. It's used inside the predicates method
    to validate and bootstrap requests for usage in rpc calls.

    Attaches ``rpc_id``, ``rpc_method``, ``rpc_api_key``, ``rpc_params``
    and ``rpc_ip_addr`` to the request.

    We need to raise JSONRPCError here if we want to return some errors
    back to user.
    """

    log.debug('Executing setup request: %r', request)
    request.rpc_ip_addr = get_ip_addr(request.environ)
    # TODO(marcink): deprecate GET at some point
    if request.method not in ['POST', 'GET']:
        log.debug('unsupported request method "%s"', request.method)
        raise JSONRPCError(
            'unsupported request method "%s". Please use POST' % request.method)

    if 'CONTENT_LENGTH' not in request.environ:
        log.debug("No Content-Length")
        raise JSONRPCError("Empty body, No Content-Length in request")

    # WSGI (PEP 3333) exposes CONTENT_LENGTH as a *string*; the previous
    # `length == 0` comparison against the raw environ value could never be
    # true, so convert it before checking for an empty body.
    try:
        length = int(request.environ['CONTENT_LENGTH'])
    except (TypeError, ValueError):
        length = 0
    log.debug('Content-Length: %s', length)

    if length == 0:
        log.debug("Content-Length is 0")
        raise JSONRPCError("Content-Length is 0")

    raw_body = request.body
    try:
        json_body = json.loads(raw_body)
    except ValueError as e:
        # catch JSON errors Here
        raise JSONRPCError("JSON parse error ERR:%s RAW:%r" % (e, raw_body))

    request.rpc_id = json_body.get('id')
    request.rpc_method = json_body.get('method')

    # check required base parameters
    try:
        api_key = json_body.get('api_key')
        if not api_key:
            api_key = json_body.get('auth_token')

        if not api_key:
            raise KeyError('api_key or auth_token')

        # TODO(marcink): support passing in token in request header

        request.rpc_api_key = api_key
        request.rpc_id = json_body['id']
        request.rpc_method = json_body['method']
        request.rpc_params = json_body['args'] \
            if isinstance(json_body['args'], dict) else {}

        log.debug('method: %s, params: %s', request.rpc_method, request.rpc_params)
    except KeyError as e:
        raise JSONRPCError('Incorrect JSON data. Missing %s' % e)

    log.debug('setup complete, now handling method:%s rpcid:%s',
              request.rpc_method, request.rpc_id, )
|
362 | 361 | |
|
363 | 362 | |
|
class RoutePredicate(object):
    """
    Custom pyramid route predicate. When enabled (truthy ``val``) it
    bootstraps incoming JSON-RPC calls via :func:`setup_request`.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'jsonrpc route = {}'.format(self.val)

    phash = text

    def __call__(self, info, request):
        if self.val:
            # potentially setup and bootstrap our call
            setup_request(request)

        # Always return True so that even if it isn't a valid RPC it
        # will fall through to the underlying handlers like notfound_view
        return True
|
381 | 380 | |
|
382 | 381 | |
|
class NotFoundPredicate(object):
    """
    View predicate used by the notfound view: matches only requests
    that were already bootstrapped as JSON-RPC calls.
    """

    def __init__(self, val, config):
        self.val = val
        self.methods = config.registry.jsonrpc_methods

    def text(self):
        return 'jsonrpc method not found = {}.'.format(self.val)

    phash = text

    def __call__(self, info, request):
        # only requests that went through setup_request carry rpc_method
        return hasattr(request, 'rpc_method')
|
395 | 394 | |
|
396 | 395 | |
|
class MethodPredicate(object):
    """
    View predicate matching a request against one registered
    JSON-RPC method name.
    """

    def __init__(self, val, config):
        self.method = val

    def text(self):
        return 'jsonrpc method = {}'.format(self.method)

    phash = text

    def __call__(self, context, request):
        # we need to explicitly return False here, so pyramid doesn't try to
        # execute our view directly. We need our main handler to execute things
        return getattr(request, 'rpc_method') == self.method
|
410 | 409 | |
|
411 | 410 | |
|
def add_jsonrpc_method(config, view, **kwargs):
    """
    Pyramid config directive registering *view* as the handler for one
    JSON-RPC method. The method name must be given via the ``method``
    keyword; raises ConfigurationError otherwise.
    """
    # pop the method name
    method = kwargs.pop('method', None)
    if method is None:
        raise ConfigurationError(
            'Cannot register a JSON-RPC method without specifying the '
            '"method"')

    # we define custom predicate, to enable to detect conflicting methods,
    # those predicates are kind of "translation" from the decorator variables
    # to internal predicates names
    kwargs['jsonrpc_method'] = method

    # register our view into global view store for validation
    config.registry.jsonrpc_methods[method] = view

    # we're using our main request_view handler here, so each method
    # has a unified handler for itself
    config.add_view(request_view, route_name='apiv2', **kwargs)
|
433 | 432 | |
|
434 | 433 | |
|
class jsonrpc_method(object):
    """
    Decorator that works similar to @add_view_config decorator,
    but tailored for our JSON RPC. Registration is deferred via
    venusian until config.scan() runs.
    """

    venusian = venusian  # for testing injection

    def __init__(self, method=None, **kwargs):
        self.method = method
        self.kwargs = kwargs

    def __call__(self, wrapped):
        kwargs = self.kwargs.copy()
        # fall back to the function name when no explicit method was given
        kwargs['method'] = self.method or wrapped.__name__
        depth = kwargs.pop('_depth', 0)

        def callback(context, name, obj):
            config = context.config.with_package(info.module)
            config.add_jsonrpc_method(view=obj, **kwargs)

        info = venusian.attach(wrapped, callback, category='pyramid',
                               depth=depth + 1)
        if info.scope == 'class':
            # ensure that attr is set if decorating a class method
            kwargs.setdefault('attr', wrapped.__name__)

        kwargs['_info'] = info.codeinfo  # fbo action_method
        return wrapped
|
464 | 463 | |
|
465 | 464 | |
|
class jsonrpc_deprecated_method(object):
    """
    Marks method as deprecated, adds log.warning, and inject special key to
    the request variable to mark method as deprecated.
    Also injects special docstring that extract_docs will catch to mark
    method as deprecated.

    :param use_method: specify which method should be used instead of
        the decorated one
    :param deprecated_at_version: version string at which the method
        was deprecated

    Use like::

        @jsonrpc_method()
        @jsonrpc_deprecated_method(use_method='new_func', deprecated_at_version='3.0.0')
        def old_func(request, apiuser, arg1, arg2):
            ...
    """

    def __init__(self, use_method, deprecated_at_version):
        self.use_method = use_method
        self.deprecated_at_version = deprecated_at_version
        self.deprecated_msg = ''

    def __call__(self, func):
        self.deprecated_msg = 'Please use method `{method}` instead.'.format(
            method=self.use_method)

        # NOTE(review): exact whitespace of this template was lost in the
        # diff view this block was reconstructed from — confirm against VCS.
        docstring = """\n
        .. deprecated:: {version}

            {deprecation_message}

        {original_docstring}
        """
        func.__doc__ = docstring.format(
            version=self.deprecated_at_version,
            deprecation_message=self.deprecated_msg,
            original_docstring=func.__doc__)
        return decorator.decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        log.warning('DEPRECATED API CALL on function %s, please '
                    'use `%s` instead', func, self.use_method)
        # alter function docstring to mark as deprecated, this is picked up
        # via fabric file that generates API DOC.
        result = func(*fargs, **fkwargs)

        # first positional arg of an exposed method is the request
        request = fargs[0]
        request.rpc_deprecation = 'DEPRECATED METHOD ' + self.deprecated_msg
        return result
|
516 | 515 | |
|
517 | 516 | |
|
def includeme(config):
    """
    Pyramid plugin entry point: wires up the JSON-RPC renderer, the
    method/route predicates, the apiv2 route and the exception views.
    """
    plugin_module = 'rhodecode.api'
    plugin_settings = get_plugin_settings(
        plugin_module, config.registry.settings)

    # global method registry, shared across includeme invocations
    if not hasattr(config.registry, 'jsonrpc_methods'):
        config.registry.jsonrpc_methods = OrderedDict()

    # match filter by given method only
    config.add_view_predicate('jsonrpc_method', MethodPredicate)

    config.add_renderer(DEFAULT_RENDERER, ExtJsonRenderer(
        serializer=json.dumps, indent=4))
    config.add_directive('add_jsonrpc_method', add_jsonrpc_method)

    config.add_route_predicate(
        'jsonrpc_call', RoutePredicate)

    config.add_route(
        'apiv2', plugin_settings.get('url', DEFAULT_URL), jsonrpc_call=True)

    config.scan(plugin_module, ignore='rhodecode.api.tests')
    # register some exception handling view
    config.add_view(exception_view, context=JSONRPCBaseError)
    config.add_view_predicate('jsonrpc_method_not_found', NotFoundPredicate)
    config.add_notfound_view(exception_view, jsonrpc_method_not_found=True)
@@ -1,413 +1,413 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2013-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | import formencode |
|
25 | 25 | import formencode.htmlfill |
|
26 | 26 | import peppercorn |
|
27 | 27 | |
|
28 | 28 | from pyramid.httpexceptions import HTTPNotFound, HTTPFound |
|
29 | 29 | from pyramid.view import view_config |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | from pyramid.response import Response |
|
32 | 32 | |
|
33 | 33 | from rhodecode.apps._base import BaseAppView |
|
34 | 34 | from rhodecode.lib import helpers as h |
|
35 | 35 | from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired |
|
36 | 36 | from rhodecode.lib.utils2 import time_to_datetime |
|
37 | 37 | from rhodecode.lib.ext_json import json |
|
38 | 38 | from rhodecode.lib.vcs.exceptions import VCSError, NodeNotChangedError |
|
39 | 39 | from rhodecode.model.gist import GistModel |
|
40 | 40 | from rhodecode.model.meta import Session |
|
41 | 41 | from rhodecode.model.db import Gist, User, or_ |
|
42 | 42 | from rhodecode.model import validation_schema |
|
43 | 43 | from rhodecode.model.validation_schema.schemas import gist_schema |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class GistView(BaseAppView): |
|
50 | 50 | |
|
51 | 51 | def load_default_context(self): |
|
52 | 52 | _ = self.request.translate |
|
53 | 53 | c = self._get_local_tmpl_context() |
|
54 | 54 | c.user = c.auth_user.get_instance() |
|
55 | 55 | |
|
56 | 56 | c.lifetime_values = [ |
|
57 | 57 | (-1, _('forever')), |
|
58 | 58 | (5, _('5 minutes')), |
|
59 | 59 | (60, _('1 hour')), |
|
60 | 60 | (60 * 24, _('1 day')), |
|
61 | 61 | (60 * 24 * 30, _('1 month')), |
|
62 | 62 | ] |
|
63 | 63 | |
|
64 | 64 | c.lifetime_options = [(c.lifetime_values, _("Lifetime"))] |
|
65 | 65 | c.acl_options = [ |
|
66 | 66 | (Gist.ACL_LEVEL_PRIVATE, _("Requires registered account")), |
|
67 | 67 | (Gist.ACL_LEVEL_PUBLIC, _("Can be accessed by anonymous users")) |
|
68 | 68 | ] |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | return c |
|
72 | 72 | |
|
73 | 73 | @LoginRequired() |
|
74 | 74 | @view_config( |
|
75 | 75 | route_name='gists_show', request_method='GET', |
|
76 | 76 | renderer='rhodecode:templates/admin/gists/index.mako') |
|
77 | 77 | def gist_show_all(self): |
|
78 | 78 | c = self.load_default_context() |
|
79 | 79 | |
|
80 | 80 | not_default_user = self._rhodecode_user.username != User.DEFAULT_USER |
|
81 | 81 | c.show_private = self.request.GET.get('private') and not_default_user |
|
82 | 82 | c.show_public = self.request.GET.get('public') and not_default_user |
|
83 | 83 | c.show_all = self.request.GET.get('all') and self._rhodecode_user.admin |
|
84 | 84 | |
|
85 | 85 | gists = _gists = Gist().query()\ |
|
86 | 86 | .filter(or_(Gist.gist_expires == -1, Gist.gist_expires >= time.time()))\ |
|
87 | 87 | .order_by(Gist.created_on.desc()) |
|
88 | 88 | |
|
89 | 89 | c.active = 'public' |
|
90 | 90 | # MY private |
|
91 | 91 | if c.show_private and not c.show_public: |
|
92 | 92 | gists = _gists.filter(Gist.gist_type == Gist.GIST_PRIVATE)\ |
|
93 | 93 | .filter(Gist.gist_owner == self._rhodecode_user.user_id) |
|
94 | 94 | c.active = 'my_private' |
|
95 | 95 | # MY public |
|
96 | 96 | elif c.show_public and not c.show_private: |
|
97 | 97 | gists = _gists.filter(Gist.gist_type == Gist.GIST_PUBLIC)\ |
|
98 | 98 | .filter(Gist.gist_owner == self._rhodecode_user.user_id) |
|
99 | 99 | c.active = 'my_public' |
|
100 | 100 | # MY public+private |
|
101 | 101 | elif c.show_private and c.show_public: |
|
102 | 102 | gists = _gists.filter(or_(Gist.gist_type == Gist.GIST_PUBLIC, |
|
103 | 103 | Gist.gist_type == Gist.GIST_PRIVATE))\ |
|
104 | 104 | .filter(Gist.gist_owner == self._rhodecode_user.user_id) |
|
105 | 105 | c.active = 'my_all' |
|
106 | 106 | # Show all by super-admin |
|
107 | 107 | elif c.show_all: |
|
108 | 108 | c.active = 'all' |
|
109 | 109 | gists = _gists |
|
110 | 110 | |
|
111 | 111 | # default show ALL public gists |
|
112 | 112 | if not c.show_public and not c.show_private and not c.show_all: |
|
113 | 113 | gists = _gists.filter(Gist.gist_type == Gist.GIST_PUBLIC) |
|
114 | 114 | c.active = 'public' |
|
115 | 115 | |
|
116 | 116 | _render = self.request.get_partial_renderer( |
|
117 | 117 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
118 | 118 | |
|
119 | 119 | data = [] |
|
120 | 120 | |
|
121 | 121 | for gist in gists: |
|
122 | 122 | data.append({ |
|
123 | 123 | 'created_on': _render('gist_created', gist.created_on), |
|
124 | 124 | 'created_on_raw': gist.created_on, |
|
125 | 125 | 'type': _render('gist_type', gist.gist_type), |
|
126 | 126 | 'access_id': _render('gist_access_id', gist.gist_access_id, gist.owner.full_contact), |
|
127 | 127 | 'author': _render('gist_author', gist.owner.full_contact, gist.created_on, gist.gist_expires), |
|
128 | 128 | 'author_raw': h.escape(gist.owner.full_contact), |
|
129 | 129 | 'expires': _render('gist_expires', gist.gist_expires), |
|
130 | 130 | 'description': _render('gist_description', gist.gist_description) |
|
131 | 131 | }) |
|
132 | 132 | c.data = json.dumps(data) |
|
133 | 133 | |
|
134 | 134 | return self._get_template_context(c) |
|
135 | 135 | |
|
136 | 136 | @LoginRequired() |
|
137 | 137 | @NotAnonymous() |
|
138 | 138 | @view_config( |
|
139 | 139 | route_name='gists_new', request_method='GET', |
|
140 | 140 | renderer='rhodecode:templates/admin/gists/new.mako') |
|
141 | 141 | def gist_new(self): |
|
142 | 142 | c = self.load_default_context() |
|
143 | 143 | return self._get_template_context(c) |
|
144 | 144 | |
|
    @LoginRequired()
    @NotAnonymous()
    @CSRFRequired()
    @view_config(
        route_name='gists_create', request_method='POST',
        renderer='rhodecode:templates/admin/gists/new.mako')
    def gist_create(self):
        """
        Create a new gist from the submitted form.

        On success commits the new gist and redirects (via raised HTTPFound)
        to its show page. On schema validation failure re-renders the form
        with errors filled in; on any other error flashes a message and
        redirects back to the "new gist" form.
        """
        _ = self.request.translate
        c = self.load_default_context()

        # Flat form POST; normalize into the node-list structure the
        # colander schema expects (a single file per created gist).
        data = dict(self.request.POST)
        data['filename'] = data.get('filename') or Gist.DEFAULT_FILENAME
        data['nodes'] = [{
            'filename': data['filename'],
            'content': data.get('content'),
            'mimetype': data.get('mimetype')  # None is autodetect
        }]

        data['gist_type'] = (
            Gist.GIST_PUBLIC if data.get('public') else Gist.GIST_PRIVATE)
        data['gist_acl_level'] = (
            data.get('gist_acl_level') or Gist.ACL_LEVEL_PRIVATE)

        schema = gist_schema.GistSchema().bind(
            lifetime_options=[x[0] for x in c.lifetime_values])

        try:

            schema_data = schema.deserialize(data)
            # convert to safer format with just KEYs so we sure no duplicates
            schema_data['nodes'] = gist_schema.sequence_to_nodes(
                schema_data['nodes'])

            gist = GistModel().create(
                gist_id=schema_data['gistid'],  # custom access id not real ID
                description=schema_data['description'],
                owner=self._rhodecode_user.user_id,
                gist_mapping=schema_data['nodes'],
                gist_type=schema_data['gist_type'],
                lifetime=schema_data['lifetime'],
                gist_acl_level=schema_data['gist_acl_level']
            )
            Session().commit()
            new_gist_id = gist.gist_access_id
        except validation_schema.Invalid as errors:
            defaults = data
            errors = errors.asdict()

            # Flatten node-level error keys (colander reports them under
            # 'nodes.0.*') back onto the flat form field names so that
            # htmlfill can attach them to the right inputs.
            if 'nodes.0.content' in errors:
                errors['content'] = errors['nodes.0.content']
                del errors['nodes.0.content']
            if 'nodes.0.filename' in errors:
                errors['filename'] = errors['nodes.0.filename']
                del errors['nodes.0.filename']

            # Re-render the form template and inject previous values + errors.
            data = render('rhodecode:templates/admin/gists/new.mako',
                          self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=defaults,
                errors=errors,
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)

        except Exception:
            log.exception("Exception while trying to create a gist")
            h.flash(_('Error occurred during gist creation'), category='error')
            raise HTTPFound(h.route_url('gists_new'))
        # success path: redirect to the freshly created gist
        raise HTTPFound(h.route_url('gist_show', gist_id=new_gist_id))
|
217 | 217 | |
|
218 | 218 | @LoginRequired() |
|
219 | 219 | @NotAnonymous() |
|
220 | 220 | @CSRFRequired() |
|
221 | 221 | @view_config( |
|
222 | 222 | route_name='gist_delete', request_method='POST') |
|
223 | 223 | def gist_delete(self): |
|
224 | 224 | _ = self.request.translate |
|
225 | 225 | gist_id = self.request.matchdict['gist_id'] |
|
226 | 226 | |
|
227 | 227 | c = self.load_default_context() |
|
228 | 228 | c.gist = Gist.get_or_404(gist_id) |
|
229 | 229 | |
|
230 | 230 | owner = c.gist.gist_owner == self._rhodecode_user.user_id |
|
231 | 231 | if not (h.HasPermissionAny('hg.admin')() or owner): |
|
232 | 232 | log.warning('Deletion of Gist was forbidden ' |
|
233 | 233 | 'by unauthorized user: `%s`', self._rhodecode_user) |
|
234 | 234 | raise HTTPNotFound() |
|
235 | 235 | |
|
236 | 236 | GistModel().delete(c.gist) |
|
237 | 237 | Session().commit() |
|
238 | 238 | h.flash(_('Deleted gist %s') % c.gist.gist_access_id, category='success') |
|
239 | 239 | |
|
240 | 240 | raise HTTPFound(h.route_url('gists_show')) |
|
241 | 241 | |
|
242 | 242 | def _get_gist(self, gist_id): |
|
243 | 243 | |
|
244 | 244 | gist = Gist.get_or_404(gist_id) |
|
245 | 245 | |
|
246 | 246 | # Check if this gist is expired |
|
247 | 247 | if gist.gist_expires != -1: |
|
248 | 248 | if time.time() > gist.gist_expires: |
|
249 | 249 | log.error( |
|
250 | 250 | 'Gist expired at %s', time_to_datetime(gist.gist_expires)) |
|
251 | 251 | raise HTTPNotFound() |
|
252 | 252 | |
|
253 | 253 | # check if this gist requires a login |
|
254 | 254 | is_default_user = self._rhodecode_user.username == User.DEFAULT_USER |
|
255 | 255 | if gist.acl_level == Gist.ACL_LEVEL_PRIVATE and is_default_user: |
|
256 | 256 | log.error("Anonymous user %s tried to access protected gist `%s`", |
|
257 | 257 | self._rhodecode_user, gist_id) |
|
258 | 258 | raise HTTPNotFound() |
|
259 | 259 | return gist |
|
260 | 260 | |
|
261 | 261 | @LoginRequired() |
|
262 | 262 | @view_config( |
|
263 | 263 | route_name='gist_show', request_method='GET', |
|
264 | 264 | renderer='rhodecode:templates/admin/gists/show.mako') |
|
265 | 265 | @view_config( |
|
266 | 266 | route_name='gist_show_rev', request_method='GET', |
|
267 | 267 | renderer='rhodecode:templates/admin/gists/show.mako') |
|
268 | 268 | @view_config( |
|
269 | 269 | route_name='gist_show_formatted', request_method='GET', |
|
270 | 270 | renderer=None) |
|
271 | 271 | @view_config( |
|
272 | 272 | route_name='gist_show_formatted_path', request_method='GET', |
|
273 | 273 | renderer=None) |
|
274 | 274 | def gist_show(self): |
|
275 | 275 | gist_id = self.request.matchdict['gist_id'] |
|
276 | 276 | |
|
277 | 277 | # TODO(marcink): expose those via matching dict |
|
278 | 278 | revision = self.request.matchdict.get('revision', 'tip') |
|
279 | 279 | f_path = self.request.matchdict.get('f_path', None) |
|
280 | 280 | return_format = self.request.matchdict.get('format') |
|
281 | 281 | |
|
282 | 282 | c = self.load_default_context() |
|
283 | 283 | c.gist = self._get_gist(gist_id) |
|
284 | 284 | c.render = not self.request.GET.get('no-render', False) |
|
285 | 285 | |
|
286 | 286 | try: |
|
287 | 287 | c.file_last_commit, c.files = GistModel().get_gist_files( |
|
288 | 288 | gist_id, revision=revision) |
|
289 | 289 | except VCSError: |
|
290 | 290 | log.exception("Exception in gist show") |
|
291 | 291 | raise HTTPNotFound() |
|
292 | 292 | |
|
293 | 293 | if return_format == 'raw': |
|
294 | 294 | content = '\n\n'.join([f.content for f in c.files |
|
295 | 295 | if (f_path is None or f.path == f_path)]) |
|
296 | 296 | response = Response(content) |
|
297 | 297 | response.content_type = 'text/plain' |
|
298 | 298 | return response |
|
299 | 299 | |
|
300 | 300 | return self._get_template_context(c) |
|
301 | 301 | |
|
302 | 302 | @LoginRequired() |
|
303 | 303 | @NotAnonymous() |
|
304 | 304 | @view_config( |
|
305 | 305 | route_name='gist_edit', request_method='GET', |
|
306 | 306 | renderer='rhodecode:templates/admin/gists/edit.mako') |
|
307 | 307 | def gist_edit(self): |
|
308 | 308 | _ = self.request.translate |
|
309 | 309 | gist_id = self.request.matchdict['gist_id'] |
|
310 | 310 | c = self.load_default_context() |
|
311 | 311 | c.gist = self._get_gist(gist_id) |
|
312 | 312 | |
|
313 | 313 | owner = c.gist.gist_owner == self._rhodecode_user.user_id |
|
314 | 314 | if not (h.HasPermissionAny('hg.admin')() or owner): |
|
315 | 315 | raise HTTPNotFound() |
|
316 | 316 | |
|
317 | 317 | try: |
|
318 | 318 | c.file_last_commit, c.files = GistModel().get_gist_files(gist_id) |
|
319 | 319 | except VCSError: |
|
320 | 320 | log.exception("Exception in gist edit") |
|
321 | 321 | raise HTTPNotFound() |
|
322 | 322 | |
|
323 | 323 | if c.gist.gist_expires == -1: |
|
324 | 324 | expiry = _('never') |
|
325 | 325 | else: |
|
326 | 326 | # this cannot use timeago, since it's used in select2 as a value |
|
327 | 327 | expiry = h.age(h.time_to_datetime(c.gist.gist_expires)) |
|
328 | 328 | |
|
329 | 329 | c.lifetime_values.append( |
|
330 | 330 | (0, _('%(expiry)s - current value') % {'expiry': _(expiry)}) |
|
331 | 331 | ) |
|
332 | 332 | |
|
333 | 333 | return self._get_template_context(c) |
|
334 | 334 | |
|
    @LoginRequired()
    @NotAnonymous()
    @CSRFRequired()
    @view_config(
        route_name='gist_update', request_method='POST',
        renderer='rhodecode:templates/admin/gists/edit.mako')
    def gist_update(self):
        """
        Persist edits of an existing gist (admins or the owner only).

        Always redirects back to the gist show page via raised HTTPFound,
        regardless of success or failure; outcome is reported via flash
        messages.
        """
        _ = self.request.translate
        gist_id = self.request.matchdict['gist_id']
        c = self.load_default_context()
        c.gist = self._get_gist(gist_id)

        owner = c.gist.gist_owner == self._rhodecode_user.user_id
        if not (h.HasPermissionAny('hg.admin')() or owner):
            raise HTTPNotFound()

        # peppercorn decodes the structured (nested) form fields
        data = peppercorn.parse(self.request.POST.items())

        schema = gist_schema.GistSchema()
        schema = schema.bind(
            # '0' is special value to leave lifetime untouched
            lifetime_options=[x[0] for x in c.lifetime_values] + [0],
        )

        try:
            schema_data = schema.deserialize(data)
            # convert to safer format with just KEYs so we sure no duplicates
            schema_data['nodes'] = gist_schema.sequence_to_nodes(
                schema_data['nodes'])

            GistModel().update(
                gist=c.gist,
                description=schema_data['description'],
                owner=c.gist.owner,
                gist_mapping=schema_data['nodes'],
                lifetime=schema_data['lifetime'],
                gist_acl_level=schema_data['gist_acl_level']
            )

            Session().commit()
            h.flash(_('Successfully updated gist content'), category='success')
        except NodeNotChangedError:
            # raised if nothing was changed in repo itself. We anyway then
            # store only DB stuff for gist
            Session().commit()
            h.flash(_('Successfully updated gist data'), category='success')
        except validation_schema.Invalid as errors:
            # validation failure: surface the (escaped) error dict to the user
            errors = h.escape(errors.asdict())
            h.flash(_('Error occurred during update of gist {}: {}').format(
                gist_id, errors), category='error')
        except Exception:
            log.exception("Exception in gist edit")
            h.flash(_('Error occurred during update of gist %s') % gist_id,
                    category='error')

        raise HTTPFound(h.route_url('gist_show', gist_id=gist_id))
|
391 | 391 | |
|
392 | 392 | @LoginRequired() |
|
393 | 393 | @NotAnonymous() |
|
394 | 394 | @view_config( |
|
395 | 395 | route_name='gist_edit_check_revision', request_method='GET', |
|
396 | 396 | renderer='json_ext') |
|
397 | 397 | def gist_edit_check_revision(self): |
|
398 | 398 | _ = self.request.translate |
|
399 | 399 | gist_id = self.request.matchdict['gist_id'] |
|
400 | 400 | c = self.load_default_context() |
|
401 | 401 | c.gist = self._get_gist(gist_id) |
|
402 | 402 | |
|
403 | 403 | last_rev = c.gist.scm_instance().get_commit() |
|
404 | 404 | success = True |
|
405 | 405 | revision = self.request.GET.get('revision') |
|
406 | 406 | |
|
407 | 407 | if revision != last_rev.raw_id: |
|
408 | log.error('Last revision %s is different then submitted %s' | |
|
409 |
|
|
|
408 | log.error('Last revision %s is different then submitted %s', | |
|
409 | revision, last_rev) | |
|
410 | 410 | # our gist has newer version than we |
|
411 | 411 | success = False |
|
412 | 412 | |
|
413 | 413 | return {'success': success} |
@@ -1,461 +1,461 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import collections |
|
23 | 23 | import datetime |
|
24 | 24 | import formencode |
|
25 | 25 | import formencode.htmlfill |
|
26 | 26 | import logging |
|
27 | 27 | import urlparse |
|
28 | 28 | import requests |
|
29 | 29 | |
|
30 | 30 | from pyramid.httpexceptions import HTTPFound |
|
31 | 31 | from pyramid.view import view_config |
|
32 | 32 | |
|
33 | 33 | from rhodecode.apps._base import BaseAppView |
|
34 | 34 | from rhodecode.authentication.base import authenticate, HTTP_TYPE |
|
35 | 35 | from rhodecode.events import UserRegistered, trigger |
|
36 | 36 | from rhodecode.lib import helpers as h |
|
37 | 37 | from rhodecode.lib import audit_logger |
|
38 | 38 | from rhodecode.lib.auth import ( |
|
39 | 39 | AuthUser, HasPermissionAnyDecorator, CSRFRequired) |
|
40 | 40 | from rhodecode.lib.base import get_ip_addr |
|
41 | 41 | from rhodecode.lib.exceptions import UserCreationError |
|
42 | 42 | from rhodecode.lib.utils2 import safe_str |
|
43 | 43 | from rhodecode.model.db import User, UserApiKeys |
|
44 | 44 | from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm |
|
45 | 45 | from rhodecode.model.meta import Session |
|
46 | 46 | from rhodecode.model.auth_token import AuthTokenModel |
|
47 | 47 | from rhodecode.model.settings import SettingsModel |
|
48 | 48 | from rhodecode.model.user import UserModel |
|
49 | 49 | from rhodecode.translation import _ |
|
50 | 50 | |
|
51 | 51 | |
|
# module-level logger for the login/registration views
log = logging.getLogger(__name__)

# Lightweight container for the stored recaptcha configuration:
# `active` is True when a private key is configured.
CaptchaData = collections.namedtuple(
    'CaptchaData', 'active, private_key, public_key')
|
56 | 56 | |
|
57 | 57 | |
|
def _store_user_in_session(session, username, remember=False):
    """
    Mark `username` as authenticated in the given Beaker session.

    Updates the user's last-login timestamp, optionally extends the
    session cookie lifetime by one year when `remember` is set, and
    returns the ``Set-Cookie`` response headers (or None when the
    session did not emit a new cookie).
    """
    user = User.get_by_username(username, case_insensitive=True)
    auth_user = AuthUser(user.user_id)
    auth_user.set_authenticated()
    cookie_store = auth_user.get_cookie_store()
    session['rhodecode_user'] = cookie_store
    user.update_lastlogin()
    Session().commit()

    # If they want to be remembered, update the cookie
    if remember:
        one_year_ahead = (datetime.datetime.now() +
                          datetime.timedelta(seconds=60 * 60 * 24 * 365))
        session._set_cookie_expires(one_year_ahead)

    session.save()

    # never log the raw password from the cookie store
    sanitized = cookie_store.copy()
    sanitized['password'] = '****'
    log.info('user %s is now authenticated and stored in '
             'session, session attrs %s', username, sanitized)

    # dumps session attrs back to cookie
    session._update_cookie_out()
    # we set new cookie
    headers = None
    if session.request['set_cookie']:
        # send set-cookie headers back to response to update cookie
        headers = [('Set-Cookie', session.request['cookie_out'])]
    return headers
|
88 | 88 | |
|
89 | 89 | |
|
def get_came_from(request):
    """
    Extract and sanitize the post-login redirect target.

    Reads ``came_from`` from the query string and rejects it (falling
    back to the home route) when it uses a non-http(s) scheme, points at
    a different host, or contains CR/LF header-injection characters.
    """
    came_from = safe_str(request.GET.get('came_from', ''))
    parsed = urlparse.urlparse(came_from)
    allowed_schemes = ['http', 'https']
    default_came_from = h.route_path('home')
    if parsed.scheme and parsed.scheme not in allowed_schemes:
        log.error('Suspicious URL scheme detected %s for url %s',
                  parsed.scheme, parsed)
        came_from = default_came_from
    elif parsed.netloc and request.host != parsed.netloc:
        log.error('Suspicious NETLOC detected %s for url %s server url '
                  'is: %s', parsed.netloc, parsed, request.host)
        came_from = default_came_from
    elif any(bad_str in parsed.path for bad_str in ('\r', '\n')):
        # Fix: message previously ended with a dangling 'server url ' fragment
        # that had no matching placeholder or argument.
        log.error('Header injection detected `%s` for url %s',
                  parsed.path, parsed)
        came_from = default_came_from

    return came_from or default_came_from
|
109 | 109 | |
|
110 | 110 | |
|
class LoginView(BaseAppView):
    """
    Views for login/logout, self-registration and password reset.

    All flows funnel redirect targets through ``get_came_from`` so user
    supplied URLs are sanitized before being used in HTTPFound responses.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        # sanitized redirect target taken from the `came_from` GET param
        c.came_from = get_came_from(self.request)

        return c

    def _get_captcha_data(self):
        """Read recaptcha keys from settings; captcha is active iff a
        private key is configured."""
        settings = SettingsModel().get_all_settings()
        private_key = settings.get('rhodecode_captcha_private_key')
        public_key = settings.get('rhodecode_captcha_public_key')
        active = bool(private_key)
        return CaptchaData(
            active=active, private_key=private_key, public_key=public_key)

    def validate_captcha(self, private_key):
        """
        Verify the submitted g-recaptcha-response against Google's
        siteverify API. Returns a (status, message) tuple; `message` is
        non-empty only on failure.
        """
        captcha_rs = self.request.POST.get('g-recaptcha-response')
        url = "https://www.google.com/recaptcha/api/siteverify"
        params = {
            'secret': private_key,
            'response': captcha_rs,
            'remoteip': get_ip_addr(self.request.environ)
        }
        verify_rs = requests.get(url, params=params, verify=True, timeout=60)
        verify_rs = verify_rs.json()
        captcha_status = verify_rs.get('success', False)
        captcha_errors = verify_rs.get('error-codes', [])
        # the API may return a single error code instead of a list
        if not isinstance(captcha_errors, list):
            captcha_errors = [captcha_errors]
        captcha_errors = ', '.join(captcha_errors)
        captcha_message = ''
        if captcha_status is False:
            captcha_message = "Bad captcha. Errors: {}".format(
                captcha_errors)

        return captcha_status, captcha_message

    @view_config(
        route_name='login', request_method='GET',
        renderer='rhodecode:templates/login.mako')
    def login(self):
        """Render the login form, or redirect if already authenticated
        (including header-based pre-authentication)."""
        c = self.load_default_context()
        auth_user = self._rhodecode_user

        # redirect if already logged in
        if (auth_user.is_authenticated and
                not auth_user.is_default and auth_user.ip_allowed):
            raise HTTPFound(c.came_from)

        # check if we use headers plugin, and try to login using it.
        try:
            log.debug('Running PRE-AUTH for headers based authentication')
            auth_info = authenticate(
                '', '', self.request.environ, HTTP_TYPE, skip_missing=True)
            if auth_info:
                headers = _store_user_in_session(
                    self.session, auth_info.get('username'))
                raise HTTPFound(c.came_from, headers=headers)
        except UserCreationError as e:
            log.error(e)
            h.flash(e, category='error')

        return self._get_template_context(c)

    @view_config(
        route_name='login', request_method='POST',
        renderer='rhodecode:templates/login.mako')
    def login_post(self):
        """
        Process the login form. Success and failure are both recorded in
        the audit log; on failure the form is re-rendered with errors
        (password cleared from the defaults).
        """
        c = self.load_default_context()

        login_form = LoginForm(self.request.translate)()

        try:
            self.session.invalidate()
            form_result = login_form.to_python(self.request.POST)
            # form checks for username/password, now we're authenticated
            headers = _store_user_in_session(
                self.session,
                username=form_result['username'],
                remember=form_result['remember'])
            log.debug('Redirecting to "%s" after login.', c.came_from)

            audit_user = audit_logger.UserWrap(
                username=self.request.POST.get('username'),
                ip_addr=self.request.remote_addr)
            action_data = {'user_agent': self.request.user_agent}
            audit_logger.store_web(
                'user.login.success', action_data=action_data,
                user=audit_user, commit=True)

            raise HTTPFound(c.came_from, headers=headers)
        except formencode.Invalid as errors:
            defaults = errors.value
            # remove password from filling in form again
            defaults.pop('password', None)
            render_ctx = {
                'errors': errors.error_dict,
                'defaults': defaults,
            }

            audit_user = audit_logger.UserWrap(
                username=self.request.POST.get('username'),
                ip_addr=self.request.remote_addr)
            action_data = {'user_agent': self.request.user_agent}
            audit_logger.store_web(
                'user.login.failure', action_data=action_data,
                user=audit_user, commit=True)
            return self._get_template_context(c, **render_ctx)

        except UserCreationError as e:
            # headers auth or other auth functions that create users on
            # the fly can throw this exception signaling that there's issue
            # with user creation, explanation should be provided in
            # Exception itself
            h.flash(e, category='error')
            return self._get_template_context(c)

    @CSRFRequired()
    @view_config(route_name='logout', request_method='POST')
    def logout(self):
        """Destroy the session, audit-log the logout and send the user home."""
        auth_user = self._rhodecode_user
        log.info('Deleting session for user: `%s`', auth_user)

        action_data = {'user_agent': self.request.user_agent}
        audit_logger.store_web(
            'user.logout', action_data=action_data,
            user=auth_user, commit=True)
        self.session.delete()
        return HTTPFound(h.route_path('home'))

    @HasPermissionAnyDecorator(
        'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
    @view_config(
        route_name='register', request_method='GET',
        renderer='rhodecode:templates/register.mako',)
    def register(self, defaults=None, errors=None):
        """
        Render the registration form. Also reused by ``register_post`` to
        re-display the form with `defaults` and `errors` after a failed
        submission.
        """
        c = self.load_default_context()
        defaults = defaults or {}
        errors = errors or {}

        settings = SettingsModel().get_all_settings()
        register_message = settings.get('rhodecode_register_message') or ''
        captcha = self._get_captcha_data()
        # whether new accounts are activated automatically, based on the
        # default (anonymous) user's global permissions
        auto_active = 'hg.register.auto_activate' in User.get_default_user()\
            .AuthUser().permissions['global']

        render_ctx = self._get_template_context(c)
        render_ctx.update({
            'defaults': defaults,
            'errors': errors,
            'auto_active': auto_active,
            'captcha_active': captcha.active,
            'captcha_public_key': captcha.public_key,
            'register_message': register_message,
        })
        return render_ctx

    @HasPermissionAnyDecorator(
        'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
    @view_config(
        route_name='register', request_method='POST',
        renderer='rhodecode:templates/register.mako')
    def register_post(self):
        """
        Process a registration submission: validate the form (and captcha
        when active), create the user, audit-log and fire the
        UserRegistered event, then redirect to the login page.
        """
        self.load_default_context()
        captcha = self._get_captcha_data()
        auto_active = 'hg.register.auto_activate' in User.get_default_user()\
            .AuthUser().permissions['global']

        register_form = RegisterForm(self.request.translate)()
        try:

            form_result = register_form.to_python(self.request.POST)
            form_result['active'] = auto_active

            if captcha.active:
                captcha_status, captcha_message = self.validate_captcha(
                    captcha.private_key)

                if not captcha_status:
                    # route captcha failure through the same formencode
                    # error path as any other form error
                    _value = form_result
                    _msg = _('Bad captcha')
                    error_dict = {'recaptcha_field': captcha_message}
                    raise formencode.Invalid(
                        _msg, _value, None, error_dict=error_dict)

            new_user = UserModel().create_registration(form_result)

            action_data = {'data': new_user.get_api_data(),
                           'user_agent': self.request.user_agent}

            audit_user = audit_logger.UserWrap(
                username=new_user.username,
                user_id=new_user.user_id,
                ip_addr=self.request.remote_addr)

            audit_logger.store_web(
                'user.register', action_data=action_data,
                user=audit_user)

            event = UserRegistered(user=new_user, session=self.session)
            trigger(event)
            h.flash(
                _('You have successfully registered with RhodeCode'),
                category='success')
            Session().commit()

            redirect_ro = self.request.route_path('login')
            raise HTTPFound(redirect_ro)

        except formencode.Invalid as errors:
            # never echo passwords back into the form
            errors.value.pop('password', None)
            errors.value.pop('password_confirmation', None)
            return self.register(
                defaults=errors.value, errors=errors.error_dict)

        except UserCreationError as e:
            # container auth or other auth functions that create users on
            # the fly can throw this exception signaling that there's issue
            # with user creation, explanation should be provided in
            # Exception itself
            h.flash(e, category='error')
            return self.register()

    @view_config(
        route_name='reset_password', request_method=('GET', 'POST'),
        renderer='rhodecode:templates/password_reset.mako')
    def password_reset(self):
        """
        Password-reset request form. On POST, emails a time-limited reset
        link; deliberately shows the same success message (and fakes a 2s
        delay on invalid input) so account emails cannot be enumerated.
        """
        c = self.load_default_context()
        captcha = self._get_captcha_data()

        template_context = {
            'captcha_active': captcha.active,
            'captcha_public_key': captcha.public_key,
            'defaults': {},
            'errors': {},
        }

        # always send implicit message to prevent from discovery of
        # matching emails
        msg = _('If such email exists, a password reset link was sent to it.')

        if self.request.POST:
            if h.HasPermissionAny('hg.password_reset.disabled')():
                _email = self.request.POST.get('email', '')
                log.error('Failed attempt to reset password for `%s`.', _email)
                h.flash(_('Password reset has been disabled.'),
                        category='error')
                return HTTPFound(self.request.route_path('reset_password'))

            password_reset_form = PasswordResetForm(self.request.translate)()
            try:
                form_result = password_reset_form.to_python(
                    self.request.POST)
                user_email = form_result['email']

                if captcha.active:
                    captcha_status, captcha_message = self.validate_captcha(
                        captcha.private_key)

                    if not captcha_status:
                        _value = form_result
                        _msg = _('Bad captcha')
                        error_dict = {'recaptcha_field': captcha_message}
                        raise formencode.Invalid(
                            _msg, _value, None, error_dict=error_dict)

                # Generate reset URL and send mail.
                user = User.get_by_email(user_email)

                # generate password reset token that expires in 10 minutes
                description = u'Generated token for password reset from {}'.format(
                    datetime.datetime.now().isoformat())

                reset_token = UserModel().add_auth_token(
                    user=user, lifetime_minutes=10,
                    role=UserModel.auth_token_role.ROLE_PASSWORD_RESET,
                    description=description)
                Session().commit()

                log.debug('Successfully created password recovery token')
                password_reset_url = self.request.route_url(
                    'reset_password_confirmation',
                    _query={'key': reset_token.api_key})
                UserModel().reset_password_link(
                    form_result, password_reset_url)
                # Display success message and redirect.
                h.flash(msg, category='success')

                action_data = {'email': user_email,
                               'user_agent': self.request.user_agent}
                audit_logger.store_web(
                    'user.password.reset_request', action_data=action_data,
                    user=self._rhodecode_user, commit=True)
                return HTTPFound(self.request.route_path('reset_password'))

            except formencode.Invalid as errors:
                template_context.update({
                    'defaults': errors.value,
                    'errors': errors.error_dict,
                })
                if not self.request.POST.get('email'):
                    # case of empty email, we want to report that
                    return self._get_template_context(c, **template_context)

                if 'recaptcha_field' in errors.error_dict:
                    # case of failed captcha
                    return self._get_template_context(c, **template_context)

                log.debug('faking response on invalid password reset')
                # make this take 2s, to prevent brute forcing.
                time.sleep(2)
                h.flash(msg, category='success')
                return HTTPFound(self.request.route_path('reset_password'))

        return self._get_template_context(c, **template_context)

    @view_config(route_name='reset_password_confirmation',
                 request_method='GET')
    def password_reset_confirmation(self):
        """
        Landing page for the emailed reset link. Validates the token's
        role, resets the password and emails the new one; any failure
        redirects back to the reset form.
        """
        self.load_default_context()
        if self.request.GET and self.request.GET.get('key'):
            # make this take 2s, to prevent brute forcing.
            time.sleep(2)

            token = AuthTokenModel().get_auth_token(
                self.request.GET.get('key'))

            # verify token is the correct role
            if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET:
                log.debug('Got token with role:%s expected is %s',
                          getattr(token, 'role', 'EMPTY_TOKEN'),
                          UserApiKeys.ROLE_PASSWORD_RESET)
                h.flash(
                    _('Given reset token is invalid'), category='error')
                return HTTPFound(self.request.route_path('reset_password'))

            try:
                owner = token.user
                data = {'email': owner.email, 'token': token.api_key}
                UserModel().reset_password(data)
                h.flash(
                    _('Your password reset was successful, '
                      'a new password has been sent to your email'),
                    category='success')
            except Exception as e:
                log.error(e)
                return HTTPFound(self.request.route_path('reset_password'))

        return HTTPFound(self.request.route_path('login'))
@@ -1,313 +1,313 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | from pyramid.renderers import render |
|
27 | 27 | from pyramid.response import Response |
|
28 | 28 | |
|
29 | 29 | from rhodecode.apps._base import RepoAppView |
|
30 | 30 | from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib import diffs, codeblocks |
|
33 | 33 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
34 | 34 | from rhodecode.lib.utils import safe_str |
|
35 | 35 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
36 | 36 | from rhodecode.lib.vcs.exceptions import ( |
|
37 | 37 | EmptyRepositoryError, RepositoryError, RepositoryRequirementError, |
|
38 | 38 | NodeDoesNotExistError) |
|
39 | 39 | from rhodecode.model.db import Repository, ChangesetStatus |
|
40 | 40 | |
|
41 | 41 | log = logging.getLogger(__name__) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | class RepoCompareView(RepoAppView): |
|
45 | 45 | def load_default_context(self): |
|
46 | 46 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
47 | 47 | |
|
48 | 48 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | return c |
|
52 | 52 | |
|
53 | 53 | def _get_commit_or_redirect( |
|
54 | 54 | self, ref, ref_type, repo, redirect_after=True, partial=False): |
|
55 | 55 | """ |
|
56 | 56 | This is a safe way to get a commit. If an error occurs it |
|
57 | 57 | redirects to a commit with a proper message. If partial is set |
|
58 | 58 | then it does not do redirect raise and throws an exception instead. |
|
59 | 59 | """ |
|
60 | 60 | _ = self.request.translate |
|
61 | 61 | try: |
|
62 | 62 | return get_commit_from_ref_name(repo, safe_str(ref), ref_type) |
|
63 | 63 | except EmptyRepositoryError: |
|
64 | 64 | if not redirect_after: |
|
65 | 65 | return repo.scm_instance().EMPTY_COMMIT |
|
66 | 66 | h.flash(h.literal(_('There are no commits yet')), |
|
67 | 67 | category='warning') |
|
68 | 68 | if not partial: |
|
69 | 69 | raise HTTPFound( |
|
70 | 70 | h.route_path('repo_summary', repo_name=repo.repo_name)) |
|
71 | 71 | raise HTTPBadRequest() |
|
72 | 72 | |
|
73 | 73 | except RepositoryError as e: |
|
74 | 74 | log.exception(safe_str(e)) |
|
75 | 75 | h.flash(safe_str(h.escape(e)), category='warning') |
|
76 | 76 | if not partial: |
|
77 | 77 | raise HTTPFound( |
|
78 | 78 | h.route_path('repo_summary', repo_name=repo.repo_name)) |
|
79 | 79 | raise HTTPBadRequest() |
|
80 | 80 | |
|
81 | 81 | @LoginRequired() |
|
82 | 82 | @HasRepoPermissionAnyDecorator( |
|
83 | 83 | 'repository.read', 'repository.write', 'repository.admin') |
|
84 | 84 | @view_config( |
|
85 | 85 | route_name='repo_compare_select', request_method='GET', |
|
86 | 86 | renderer='rhodecode:templates/compare/compare_diff.mako') |
|
87 | 87 | def compare_select(self): |
|
88 | 88 | _ = self.request.translate |
|
89 | 89 | c = self.load_default_context() |
|
90 | 90 | |
|
91 | 91 | source_repo = self.db_repo_name |
|
92 | 92 | target_repo = self.request.GET.get('target_repo', source_repo) |
|
93 | 93 | c.source_repo = Repository.get_by_repo_name(source_repo) |
|
94 | 94 | c.target_repo = Repository.get_by_repo_name(target_repo) |
|
95 | 95 | |
|
96 | 96 | if c.source_repo is None or c.target_repo is None: |
|
97 | 97 | raise HTTPNotFound() |
|
98 | 98 | |
|
99 | 99 | c.compare_home = True |
|
100 | 100 | c.commit_ranges = [] |
|
101 | 101 | c.collapse_all_commits = False |
|
102 | 102 | c.diffset = None |
|
103 | 103 | c.limited_diff = False |
|
104 | 104 | c.source_ref = c.target_ref = _('Select commit') |
|
105 | 105 | c.source_ref_type = "" |
|
106 | 106 | c.target_ref_type = "" |
|
107 | 107 | c.commit_statuses = ChangesetStatus.STATUSES |
|
108 | 108 | c.preview_mode = False |
|
109 | 109 | c.file_path = None |
|
110 | 110 | |
|
111 | 111 | return self._get_template_context(c) |
|
112 | 112 | |
|
113 | 113 | @LoginRequired() |
|
114 | 114 | @HasRepoPermissionAnyDecorator( |
|
115 | 115 | 'repository.read', 'repository.write', 'repository.admin') |
|
116 | 116 | @view_config( |
|
117 | 117 | route_name='repo_compare', request_method='GET', |
|
118 | 118 | renderer=None) |
|
119 | 119 | def compare(self): |
|
120 | 120 | _ = self.request.translate |
|
121 | 121 | c = self.load_default_context() |
|
122 | 122 | |
|
123 | 123 | source_ref_type = self.request.matchdict['source_ref_type'] |
|
124 | 124 | source_ref = self.request.matchdict['source_ref'] |
|
125 | 125 | target_ref_type = self.request.matchdict['target_ref_type'] |
|
126 | 126 | target_ref = self.request.matchdict['target_ref'] |
|
127 | 127 | |
|
128 | 128 | # source_ref will be evaluated in source_repo |
|
129 | 129 | source_repo_name = self.db_repo_name |
|
130 | 130 | source_path, source_id = parse_path_ref(source_ref) |
|
131 | 131 | |
|
132 | 132 | # target_ref will be evaluated in target_repo |
|
133 | 133 | target_repo_name = self.request.GET.get('target_repo', source_repo_name) |
|
134 | 134 | target_path, target_id = parse_path_ref( |
|
135 | 135 | target_ref, default_path=self.request.GET.get('f_path', '')) |
|
136 | 136 | |
|
137 | 137 | # if merge is True |
|
138 | 138 | # Show what changes since the shared ancestor commit of target/source |
|
139 | 139 | # the source would get if it was merged with target. Only commits |
|
140 | 140 | # which are in target but not in source will be shown. |
|
141 | 141 | merge = str2bool(self.request.GET.get('merge')) |
|
142 | 142 | # if merge is False |
|
143 | 143 | # Show a raw diff of source/target refs even if no ancestor exists |
|
144 | 144 | |
|
145 | 145 | # c.fulldiff disables cut_off_limit |
|
146 | 146 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) |
|
147 | 147 | |
|
148 | 148 | c.file_path = target_path |
|
149 | 149 | c.commit_statuses = ChangesetStatus.STATUSES |
|
150 | 150 | |
|
151 | 151 | # if partial, returns just compare_commits.html (commits log) |
|
152 | 152 | partial = self.request.is_xhr |
|
153 | 153 | |
|
154 | 154 | # swap url for compare_diff page |
|
155 | 155 | c.swap_url = h.route_path( |
|
156 | 156 | 'repo_compare', |
|
157 | 157 | repo_name=target_repo_name, |
|
158 | 158 | source_ref_type=target_ref_type, |
|
159 | 159 | source_ref=target_ref, |
|
160 | 160 | target_repo=source_repo_name, |
|
161 | 161 | target_ref_type=source_ref_type, |
|
162 | 162 | target_ref=source_ref, |
|
163 | 163 | _query=dict(merge=merge and '1' or '', f_path=target_path)) |
|
164 | 164 | |
|
165 | 165 | source_repo = Repository.get_by_repo_name(source_repo_name) |
|
166 | 166 | target_repo = Repository.get_by_repo_name(target_repo_name) |
|
167 | 167 | |
|
168 | 168 | if source_repo is None: |
|
169 | 169 | log.error('Could not find the source repo: {}' |
|
170 | 170 | .format(source_repo_name)) |
|
171 | 171 | h.flash(_('Could not find the source repo: `{}`') |
|
172 | 172 | .format(h.escape(source_repo_name)), category='error') |
|
173 | 173 | raise HTTPFound( |
|
174 | 174 | h.route_path('repo_compare_select', repo_name=self.db_repo_name)) |
|
175 | 175 | |
|
176 | 176 | if target_repo is None: |
|
177 | 177 | log.error('Could not find the target repo: {}' |
|
178 | 178 | .format(source_repo_name)) |
|
179 | 179 | h.flash(_('Could not find the target repo: `{}`') |
|
180 | 180 | .format(h.escape(target_repo_name)), category='error') |
|
181 | 181 | raise HTTPFound( |
|
182 | 182 | h.route_path('repo_compare_select', repo_name=self.db_repo_name)) |
|
183 | 183 | |
|
184 | 184 | source_scm = source_repo.scm_instance() |
|
185 | 185 | target_scm = target_repo.scm_instance() |
|
186 | 186 | |
|
187 | 187 | source_alias = source_scm.alias |
|
188 | 188 | target_alias = target_scm.alias |
|
189 | 189 | if source_alias != target_alias: |
|
190 | 190 | msg = _('The comparison of two different kinds of remote repos ' |
|
191 | 191 | 'is not available') |
|
192 | 192 | log.error(msg) |
|
193 | 193 | h.flash(msg, category='error') |
|
194 | 194 | raise HTTPFound( |
|
195 | 195 | h.route_path('repo_compare_select', repo_name=self.db_repo_name)) |
|
196 | 196 | |
|
197 | 197 | source_commit = self._get_commit_or_redirect( |
|
198 | 198 | ref=source_id, ref_type=source_ref_type, repo=source_repo, |
|
199 | 199 | partial=partial) |
|
200 | 200 | target_commit = self._get_commit_or_redirect( |
|
201 | 201 | ref=target_id, ref_type=target_ref_type, repo=target_repo, |
|
202 | 202 | partial=partial) |
|
203 | 203 | |
|
204 | 204 | c.compare_home = False |
|
205 | 205 | c.source_repo = source_repo |
|
206 | 206 | c.target_repo = target_repo |
|
207 | 207 | c.source_ref = source_ref |
|
208 | 208 | c.target_ref = target_ref |
|
209 | 209 | c.source_ref_type = source_ref_type |
|
210 | 210 | c.target_ref_type = target_ref_type |
|
211 | 211 | |
|
212 | 212 | pre_load = ["author", "branch", "date", "message"] |
|
213 | 213 | c.ancestor = None |
|
214 | 214 | |
|
215 | 215 | if c.file_path: |
|
216 | 216 | if source_commit == target_commit: |
|
217 | 217 | c.commit_ranges = [] |
|
218 | 218 | else: |
|
219 | 219 | c.commit_ranges = [target_commit] |
|
220 | 220 | else: |
|
221 | 221 | try: |
|
222 | 222 | c.commit_ranges = source_scm.compare( |
|
223 | 223 | source_commit.raw_id, target_commit.raw_id, |
|
224 | 224 | target_scm, merge, pre_load=pre_load) |
|
225 | 225 | if merge: |
|
226 | 226 | c.ancestor = source_scm.get_common_ancestor( |
|
227 | 227 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
228 | 228 | except RepositoryRequirementError: |
|
229 | 229 | msg = _('Could not compare repos with different ' |
|
230 | 230 | 'large file settings') |
|
231 | 231 | log.error(msg) |
|
232 | 232 | if partial: |
|
233 | 233 | return Response(msg) |
|
234 | 234 | h.flash(msg, category='error') |
|
235 | 235 | raise HTTPFound( |
|
236 | 236 | h.route_path('repo_compare_select', |
|
237 | 237 | repo_name=self.db_repo_name)) |
|
238 | 238 | |
|
239 | 239 | c.statuses = self.db_repo.statuses( |
|
240 | 240 | [x.raw_id for x in c.commit_ranges]) |
|
241 | 241 | |
|
242 | 242 | # auto collapse if we have more than limit |
|
243 | 243 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
244 | 244 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
245 | 245 | |
|
246 | 246 | if partial: # for PR ajax commits loader |
|
247 | 247 | if not c.ancestor: |
|
248 | 248 | return Response('') # cannot merge if there is no ancestor |
|
249 | 249 | |
|
250 | 250 | html = render( |
|
251 | 251 | 'rhodecode:templates/compare/compare_commits.mako', |
|
252 | 252 | self._get_template_context(c), self.request) |
|
253 | 253 | return Response(html) |
|
254 | 254 | |
|
255 | 255 | if c.ancestor: |
|
256 | 256 | # case we want a simple diff without incoming commits, |
|
257 | 257 | # previewing what will be merged. |
|
258 | 258 | # Make the diff on target repo (which is known to have target_ref) |
|
259 | log.debug('Using ancestor %s as source_ref instead of %s' | |
|
260 |
|
|
|
259 | log.debug('Using ancestor %s as source_ref instead of %s', | |
|
260 | c.ancestor, source_ref) | |
|
261 | 261 | source_repo = target_repo |
|
262 | 262 | source_commit = target_repo.get_commit(commit_id=c.ancestor) |
|
263 | 263 | |
|
264 | 264 | # diff_limit will cut off the whole diff if the limit is applied |
|
265 | 265 | # otherwise it will just hide the big files from the front-end |
|
266 | 266 | diff_limit = c.visual.cut_off_limit_diff |
|
267 | 267 | file_limit = c.visual.cut_off_limit_file |
|
268 | 268 | |
|
269 | 269 | log.debug('calculating diff between ' |
|
270 | 270 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
271 | 271 | source_commit, target_commit, |
|
272 | 272 | safe_unicode(source_repo.scm_instance().path)) |
|
273 | 273 | |
|
274 | 274 | if source_commit.repository != target_commit.repository: |
|
275 | 275 | msg = _( |
|
276 | 276 | "Repositories unrelated. " |
|
277 | 277 | "Cannot compare commit %(commit1)s from repository %(repo1)s " |
|
278 | 278 | "with commit %(commit2)s from repository %(repo2)s.") % { |
|
279 | 279 | 'commit1': h.show_id(source_commit), |
|
280 | 280 | 'repo1': source_repo.repo_name, |
|
281 | 281 | 'commit2': h.show_id(target_commit), |
|
282 | 282 | 'repo2': target_repo.repo_name, |
|
283 | 283 | } |
|
284 | 284 | h.flash(msg, category='error') |
|
285 | 285 | raise HTTPFound( |
|
286 | 286 | h.route_path('repo_compare_select', |
|
287 | 287 | repo_name=self.db_repo_name)) |
|
288 | 288 | |
|
289 | 289 | txt_diff = source_repo.scm_instance().get_diff( |
|
290 | 290 | commit1=source_commit, commit2=target_commit, |
|
291 | 291 | path=target_path, path1=source_path) |
|
292 | 292 | |
|
293 | 293 | diff_processor = diffs.DiffProcessor( |
|
294 | 294 | txt_diff, format='newdiff', diff_limit=diff_limit, |
|
295 | 295 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
296 | 296 | _parsed = diff_processor.prepare() |
|
297 | 297 | |
|
298 | 298 | diffset = codeblocks.DiffSet( |
|
299 | 299 | repo_name=source_repo.repo_name, |
|
300 | 300 | source_node_getter=codeblocks.diffset_node_getter(source_commit), |
|
301 | 301 | target_node_getter=codeblocks.diffset_node_getter(target_commit), |
|
302 | 302 | ) |
|
303 | 303 | c.diffset = self.path_filter.render_patchset_filtered( |
|
304 | 304 | diffset, _parsed, source_ref, target_ref) |
|
305 | 305 | |
|
306 | 306 | c.preview_mode = merge |
|
307 | 307 | c.source_commit = source_commit |
|
308 | 308 | c.target_commit = target_commit |
|
309 | 309 | |
|
310 | 310 | html = render( |
|
311 | 311 | 'rhodecode:templates/compare/compare_diff.mako', |
|
312 | 312 | self._get_template_context(c), self.request) |
|
313 | 313 | return Response(html) No newline at end of file |
@@ -1,113 +1,113 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | from pyramid.view import view_config |
|
23 | 23 | |
|
24 | 24 | from rhodecode.apps._base import RepoAppView |
|
25 | 25 | from rhodecode.lib import audit_logger |
|
26 | 26 | from rhodecode.lib import helpers as h |
|
27 | 27 | from rhodecode.lib.auth import ( |
|
28 | 28 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) |
|
29 | 29 | from rhodecode.lib.ext_json import json |
|
30 | 30 | |
|
31 | 31 | log = logging.getLogger(__name__) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | class StripView(RepoAppView): |
|
35 | 35 | def load_default_context(self): |
|
36 | 36 | c = self._get_local_tmpl_context() |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | return c |
|
40 | 40 | |
|
41 | 41 | @LoginRequired() |
|
42 | 42 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
43 | 43 | @view_config( |
|
44 | 44 | route_name='edit_repo_strip', request_method='GET', |
|
45 | 45 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
46 | 46 | def strip(self): |
|
47 | 47 | c = self.load_default_context() |
|
48 | 48 | c.active = 'strip' |
|
49 | 49 | c.strip_limit = 10 |
|
50 | 50 | |
|
51 | 51 | return self._get_template_context(c) |
|
52 | 52 | |
|
53 | 53 | @LoginRequired() |
|
54 | 54 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
55 | 55 | @CSRFRequired() |
|
56 | 56 | @view_config( |
|
57 | 57 | route_name='strip_check', request_method='POST', |
|
58 | 58 | renderer='json', xhr=True) |
|
59 | 59 | def strip_check(self): |
|
60 | 60 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
61 | 61 | data = {} |
|
62 | 62 | rp = self.request.POST |
|
63 | 63 | for i in range(1, 11): |
|
64 | 64 | chset = 'changeset_id-%d' % (i,) |
|
65 | 65 | check = rp.get(chset) |
|
66 | 66 | |
|
67 | 67 | if check: |
|
68 | 68 | data[i] = self.db_repo.get_changeset(rp[chset]) |
|
69 | 69 | if isinstance(data[i], EmptyCommit): |
|
70 | 70 | data[i] = {'rev': None, 'commit': h.escape(rp[chset])} |
|
71 | 71 | else: |
|
72 | 72 | data[i] = {'rev': data[i].raw_id, 'branch': data[i].branch, |
|
73 | 73 | 'author': h.escape(data[i].author), |
|
74 | 74 | 'comment': h.escape(data[i].message)} |
|
75 | 75 | else: |
|
76 | 76 | break |
|
77 | 77 | return data |
|
78 | 78 | |
|
79 | 79 | @LoginRequired() |
|
80 | 80 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
81 | 81 | @CSRFRequired() |
|
82 | 82 | @view_config( |
|
83 | 83 | route_name='strip_execute', request_method='POST', |
|
84 | 84 | renderer='json', xhr=True) |
|
85 | 85 | def strip_execute(self): |
|
86 | 86 | from rhodecode.model.scm import ScmModel |
|
87 | 87 | |
|
88 | 88 | c = self.load_default_context() |
|
89 | 89 | user = self._rhodecode_user |
|
90 | 90 | rp = self.request.POST |
|
91 | 91 | data = {} |
|
92 | 92 | for idx in rp: |
|
93 | 93 | commit = json.loads(rp[idx]) |
|
94 | 94 | # If someone put two times the same branch |
|
95 | 95 | if commit['branch'] in data.keys(): |
|
96 | 96 | continue |
|
97 | 97 | try: |
|
98 | 98 | ScmModel().strip( |
|
99 | 99 | repo=self.db_repo, |
|
100 | 100 | commit_id=commit['rev'], branch=commit['branch']) |
|
101 |
log.info('Stripped commit %s from repo `%s` by %s' |
|
|
102 |
commit['rev'], self.db_repo_name, user) |
|
|
101 | log.info('Stripped commit %s from repo `%s` by %s', | |
|
102 | commit['rev'], self.db_repo_name, user) | |
|
103 | 103 | data[commit['rev']] = True |
|
104 | 104 | |
|
105 | 105 | audit_logger.store_web( |
|
106 | 106 | 'repo.commit.strip', action_data={'commit_id': commit['rev']}, |
|
107 | 107 | repo=self.db_repo, user=self._rhodecode_user, commit=True) |
|
108 | 108 | |
|
109 | 109 | except Exception as e: |
|
110 | 110 | data[commit['rev']] = False |
|
111 |
log.debug('Stripped commit %s from repo `%s` failed by %s, exeption %s' |
|
|
112 |
commit['rev'], self.db_repo_name, user, e.message) |
|
|
111 | log.debug('Stripped commit %s from repo `%s` failed by %s, exeption %s', | |
|
112 | commit['rev'], self.db_repo_name, user, e.message) | |
|
113 | 113 | return data |
@@ -1,285 +1,285 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode authentication plugin for Atlassian CROWD |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | import colander |
|
27 | 27 | import base64 |
|
28 | 28 | import logging |
|
29 | 29 | import urllib2 |
|
30 | 30 | |
|
31 | 31 | from rhodecode.translation import _ |
|
32 | 32 | from rhodecode.authentication.base import ( |
|
33 | 33 | RhodeCodeExternalAuthPlugin, hybrid_property) |
|
34 | 34 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
35 | 35 | from rhodecode.authentication.routes import AuthnPluginResourceBase |
|
36 | 36 | from rhodecode.lib.colander_utils import strip_whitespace |
|
37 | 37 | from rhodecode.lib.ext_json import json, formatted_json |
|
38 | 38 | from rhodecode.model.db import User |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | def plugin_factory(plugin_id, *args, **kwds): |
|
44 | 44 | """ |
|
45 | 45 | Factory function that is called during plugin discovery. |
|
46 | 46 | It returns the plugin instance. |
|
47 | 47 | """ |
|
48 | 48 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
49 | 49 | return plugin |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | class CrowdAuthnResource(AuthnPluginResourceBase): |
|
53 | 53 | pass |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | class CrowdSettingsSchema(AuthnPluginSettingsSchemaBase): |
|
57 | 57 | host = colander.SchemaNode( |
|
58 | 58 | colander.String(), |
|
59 | 59 | default='127.0.0.1', |
|
60 | 60 | description=_('The FQDN or IP of the Atlassian CROWD Server'), |
|
61 | 61 | preparer=strip_whitespace, |
|
62 | 62 | title=_('Host'), |
|
63 | 63 | widget='string') |
|
64 | 64 | port = colander.SchemaNode( |
|
65 | 65 | colander.Int(), |
|
66 | 66 | default=8095, |
|
67 | 67 | description=_('The Port in use by the Atlassian CROWD Server'), |
|
68 | 68 | preparer=strip_whitespace, |
|
69 | 69 | title=_('Port'), |
|
70 | 70 | validator=colander.Range(min=0, max=65536), |
|
71 | 71 | widget='int') |
|
72 | 72 | app_name = colander.SchemaNode( |
|
73 | 73 | colander.String(), |
|
74 | 74 | default='', |
|
75 | 75 | description=_('The Application Name to authenticate to CROWD'), |
|
76 | 76 | preparer=strip_whitespace, |
|
77 | 77 | title=_('Application Name'), |
|
78 | 78 | widget='string') |
|
79 | 79 | app_password = colander.SchemaNode( |
|
80 | 80 | colander.String(), |
|
81 | 81 | default='', |
|
82 | 82 | description=_('The password to authenticate to CROWD'), |
|
83 | 83 | preparer=strip_whitespace, |
|
84 | 84 | title=_('Application Password'), |
|
85 | 85 | widget='password') |
|
86 | 86 | admin_groups = colander.SchemaNode( |
|
87 | 87 | colander.String(), |
|
88 | 88 | default='', |
|
89 | 89 | description=_('A comma separated list of group names that identify ' |
|
90 | 90 | 'users as RhodeCode Administrators'), |
|
91 | 91 | missing='', |
|
92 | 92 | preparer=strip_whitespace, |
|
93 | 93 | title=_('Admin Groups'), |
|
94 | 94 | widget='string') |
|
95 | 95 | |
|
96 | 96 | |
|
97 | 97 | class CrowdServer(object): |
|
98 | 98 | def __init__(self, *args, **kwargs): |
|
99 | 99 | """ |
|
100 | 100 | Create a new CrowdServer object that points to IP/Address 'host', |
|
101 | 101 | on the given port, and using the given method (https/http). user and |
|
102 | 102 | passwd can be set here or with set_credentials. If unspecified, |
|
103 | 103 | "version" defaults to "latest". |
|
104 | 104 | |
|
105 | 105 | example:: |
|
106 | 106 | |
|
107 | 107 | cserver = CrowdServer(host="127.0.0.1", |
|
108 | 108 | port="8095", |
|
109 | 109 | user="some_app", |
|
110 | 110 | passwd="some_passwd", |
|
111 | 111 | version="1") |
|
112 | 112 | """ |
|
113 | 113 | if not "port" in kwargs: |
|
114 | 114 | kwargs["port"] = "8095" |
|
115 | 115 | self._logger = kwargs.get("logger", logging.getLogger(__name__)) |
|
116 | 116 | self._uri = "%s://%s:%s/crowd" % (kwargs.get("method", "http"), |
|
117 | 117 | kwargs.get("host", "127.0.0.1"), |
|
118 | 118 | kwargs.get("port", "8095")) |
|
119 | 119 | self.set_credentials(kwargs.get("user", ""), |
|
120 | 120 | kwargs.get("passwd", "")) |
|
121 | 121 | self._version = kwargs.get("version", "latest") |
|
122 | 122 | self._url_list = None |
|
123 | 123 | self._appname = "crowd" |
|
124 | 124 | |
|
125 | 125 | def set_credentials(self, user, passwd): |
|
126 | 126 | self.user = user |
|
127 | 127 | self.passwd = passwd |
|
128 | 128 | self._make_opener() |
|
129 | 129 | |
|
130 | 130 | def _make_opener(self): |
|
131 | 131 | mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
132 | 132 | mgr.add_password(None, self._uri, self.user, self.passwd) |
|
133 | 133 | handler = urllib2.HTTPBasicAuthHandler(mgr) |
|
134 | 134 | self.opener = urllib2.build_opener(handler) |
|
135 | 135 | |
|
136 | 136 | def _request(self, url, body=None, headers=None, |
|
137 | 137 | method=None, noformat=False, |
|
138 | 138 | empty_response_ok=False): |
|
139 | 139 | _headers = {"Content-type": "application/json", |
|
140 | 140 | "Accept": "application/json"} |
|
141 | 141 | if self.user and self.passwd: |
|
142 | 142 | authstring = base64.b64encode("%s:%s" % (self.user, self.passwd)) |
|
143 | 143 | _headers["Authorization"] = "Basic %s" % authstring |
|
144 | 144 | if headers: |
|
145 | 145 | _headers.update(headers) |
|
146 | 146 | log.debug("Sent crowd: \n%s" |
|
147 | 147 | % (formatted_json({"url": url, "body": body, |
|
148 | 148 | "headers": _headers}))) |
|
149 | 149 | request = urllib2.Request(url, body, _headers) |
|
150 | 150 | if method: |
|
151 | 151 | request.get_method = lambda: method |
|
152 | 152 | |
|
153 | 153 | global msg |
|
154 | 154 | msg = "" |
|
155 | 155 | try: |
|
156 | 156 | rdoc = self.opener.open(request) |
|
157 | 157 | msg = "".join(rdoc.readlines()) |
|
158 | 158 | if not msg and empty_response_ok: |
|
159 | 159 | rval = {} |
|
160 | 160 | rval["status"] = True |
|
161 | 161 | rval["error"] = "Response body was empty" |
|
162 | 162 | elif not noformat: |
|
163 | 163 | rval = json.loads(msg) |
|
164 | 164 | rval["status"] = True |
|
165 | 165 | else: |
|
166 | 166 | rval = "".join(rdoc.readlines()) |
|
167 | 167 | except Exception as e: |
|
168 | 168 | if not noformat: |
|
169 | 169 | rval = {"status": False, |
|
170 | 170 | "body": body, |
|
171 | 171 | "error": str(e) + "\n" + msg} |
|
172 | 172 | else: |
|
173 | 173 | rval = None |
|
174 | 174 | return rval |
|
175 | 175 | |
|
176 | 176 | def user_auth(self, username, password): |
|
177 | 177 | """Authenticate a user against crowd. Returns brief information about |
|
178 | 178 | the user.""" |
|
179 | 179 | url = ("%s/rest/usermanagement/%s/authentication?username=%s" |
|
180 | 180 | % (self._uri, self._version, username)) |
|
181 | 181 | body = json.dumps({"value": password}) |
|
182 | 182 | return self._request(url, body) |
|
183 | 183 | |
|
184 | 184 | def user_groups(self, username): |
|
185 | 185 | """Retrieve a list of groups to which this user belongs.""" |
|
186 | 186 | url = ("%s/rest/usermanagement/%s/user/group/nested?username=%s" |
|
187 | 187 | % (self._uri, self._version, username)) |
|
188 | 188 | return self._request(url) |
|
189 | 189 | |
|
190 | 190 | |
|
191 | 191 | class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin): |
|
192 | 192 | _settings_unsafe_keys = ['app_password'] |
|
193 | 193 | |
|
194 | 194 | def includeme(self, config): |
|
195 | 195 | config.add_authn_plugin(self) |
|
196 | 196 | config.add_authn_resource(self.get_id(), CrowdAuthnResource(self)) |
|
197 | 197 | config.add_view( |
|
198 | 198 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
199 | 199 | attr='settings_get', |
|
200 | 200 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
201 | 201 | request_method='GET', |
|
202 | 202 | route_name='auth_home', |
|
203 | 203 | context=CrowdAuthnResource) |
|
204 | 204 | config.add_view( |
|
205 | 205 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
206 | 206 | attr='settings_post', |
|
207 | 207 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
208 | 208 | request_method='POST', |
|
209 | 209 | route_name='auth_home', |
|
210 | 210 | context=CrowdAuthnResource) |
|
211 | 211 | |
|
212 | 212 | def get_settings_schema(self): |
|
213 | 213 | return CrowdSettingsSchema() |
|
214 | 214 | |
|
215 | 215 | def get_display_name(self): |
|
216 | 216 | return _('CROWD') |
|
217 | 217 | |
|
218 | 218 | @hybrid_property |
|
219 | 219 | def name(self): |
|
220 | 220 | return "crowd" |
|
221 | 221 | |
|
222 | 222 | def use_fake_password(self): |
|
223 | 223 | return True |
|
224 | 224 | |
|
225 | 225 | def user_activation_state(self): |
|
226 | 226 | def_user_perms = User.get_default_user().AuthUser().permissions['global'] |
|
227 | 227 | return 'hg.extern_activate.auto' in def_user_perms |
|
228 | 228 | |
|
229 | 229 | def auth(self, userobj, username, password, settings, **kwargs): |
|
230 | 230 | """ |
|
231 | 231 | Given a user object (which may be null), username, a plaintext password, |
|
232 | 232 | and a settings object (containing all the keys needed as listed in settings()), |
|
233 | 233 | authenticate this user's login attempt. |
|
234 | 234 | |
|
235 | 235 | Return None on failure. On success, return a dictionary of the form: |
|
236 | 236 | |
|
237 | 237 | see: RhodeCodeAuthPluginBase.auth_func_attrs |
|
238 | 238 | This is later validated for correctness |
|
239 | 239 | """ |
|
240 | 240 | if not username or not password: |
|
241 | 241 | log.debug('Empty username or password skipping...') |
|
242 | 242 | return None |
|
243 | 243 | |
|
244 |
log.debug("Crowd settings: \n%s" |
|
|
244 | log.debug("Crowd settings: \n%s", formatted_json(settings)) | |
|
245 | 245 | server = CrowdServer(**settings) |
|
246 | 246 | server.set_credentials(settings["app_name"], settings["app_password"]) |
|
247 | 247 | crowd_user = server.user_auth(username, password) |
|
248 |
log.debug("Crowd returned: \n%s" |
|
|
248 | log.debug("Crowd returned: \n%s", formatted_json(crowd_user)) | |
|
249 | 249 | if not crowd_user["status"]: |
|
250 | 250 | return None |
|
251 | 251 | |
|
252 | 252 | res = server.user_groups(crowd_user["name"]) |
|
253 |
log.debug("Crowd groups: \n%s" |
|
|
253 | log.debug("Crowd groups: \n%s", formatted_json(res)) | |
|
254 | 254 | crowd_user["groups"] = [x["name"] for x in res["groups"]] |
|
255 | 255 | |
|
256 | 256 | # old attrs fetched from RhodeCode database |
|
257 | 257 | admin = getattr(userobj, 'admin', False) |
|
258 | 258 | active = getattr(userobj, 'active', True) |
|
259 | 259 | email = getattr(userobj, 'email', '') |
|
260 | 260 | username = getattr(userobj, 'username', username) |
|
261 | 261 | firstname = getattr(userobj, 'firstname', '') |
|
262 | 262 | lastname = getattr(userobj, 'lastname', '') |
|
263 | 263 | extern_type = getattr(userobj, 'extern_type', '') |
|
264 | 264 | |
|
265 | 265 | user_attrs = { |
|
266 | 266 | 'username': username, |
|
267 | 267 | 'firstname': crowd_user["first-name"] or firstname, |
|
268 | 268 | 'lastname': crowd_user["last-name"] or lastname, |
|
269 | 269 | 'groups': crowd_user["groups"], |
|
270 | 270 | 'user_group_sync': True, |
|
271 | 271 | 'email': crowd_user["email"] or email, |
|
272 | 272 | 'admin': admin, |
|
273 | 273 | 'active': active, |
|
274 | 274 | 'active_from_extern': crowd_user.get('active'), |
|
275 | 275 | 'extern_name': crowd_user["name"], |
|
276 | 276 | 'extern_type': extern_type, |
|
277 | 277 | } |
|
278 | 278 | |
|
279 | 279 | # set an admin if we're in admin_groups of crowd |
|
280 | 280 | for group in settings["admin_groups"]: |
|
281 | 281 | if group in user_attrs["groups"]: |
|
282 | 282 | user_attrs["admin"] = True |
|
283 |
log.debug("Final crowd user object: \n%s" |
|
|
284 |
log.info('user `%s` authenticated correctly' |
|
|
283 | log.debug("Final crowd user object: \n%s", formatted_json(user_attrs)) | |
|
284 | log.info('user `%s` authenticated correctly', user_attrs['username']) | |
|
285 | 285 | return user_attrs |
@@ -1,225 +1,225 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from rhodecode.translation import _ |
|
25 | 25 | from rhodecode.authentication.base import ( |
|
26 | 26 | RhodeCodeExternalAuthPlugin, hybrid_property) |
|
27 | 27 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
28 | 28 | from rhodecode.authentication.routes import AuthnPluginResourceBase |
|
29 | 29 | from rhodecode.lib.colander_utils import strip_whitespace |
|
30 | 30 | from rhodecode.lib.utils2 import str2bool, safe_unicode |
|
31 | 31 | from rhodecode.model.db import User |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def plugin_factory(plugin_id, *args, **kwds): |
|
38 | 38 | """ |
|
39 | 39 | Factory function that is called during plugin discovery. |
|
40 | 40 | It returns the plugin instance. |
|
41 | 41 | """ |
|
42 | 42 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
43 | 43 | return plugin |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class HeadersAuthnResource(AuthnPluginResourceBase): |
|
47 | 47 | pass |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class HeadersSettingsSchema(AuthnPluginSettingsSchemaBase): |
|
51 | 51 | header = colander.SchemaNode( |
|
52 | 52 | colander.String(), |
|
53 | 53 | default='REMOTE_USER', |
|
54 | 54 | description=_('Header to extract the user from'), |
|
55 | 55 | preparer=strip_whitespace, |
|
56 | 56 | title=_('Header'), |
|
57 | 57 | widget='string') |
|
58 | 58 | fallback_header = colander.SchemaNode( |
|
59 | 59 | colander.String(), |
|
60 | 60 | default='HTTP_X_FORWARDED_USER', |
|
61 | 61 | description=_('Header to extract the user from when main one fails'), |
|
62 | 62 | preparer=strip_whitespace, |
|
63 | 63 | title=_('Fallback header'), |
|
64 | 64 | widget='string') |
|
65 | 65 | clean_username = colander.SchemaNode( |
|
66 | 66 | colander.Boolean(), |
|
67 | 67 | default=True, |
|
68 | 68 | description=_('Perform cleaning of user, if passed user has @ in ' |
|
69 | 69 | 'username then first part before @ is taken. ' |
|
70 | 70 | 'If there\'s \\ in the username only the part after ' |
|
71 | 71 | ' \\ is taken'), |
|
72 | 72 | missing=False, |
|
73 | 73 | title=_('Clean username'), |
|
74 | 74 | widget='bool') |
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin): |
|
78 | 78 | |
|
79 | 79 | def includeme(self, config): |
|
80 | 80 | config.add_authn_plugin(self) |
|
81 | 81 | config.add_authn_resource(self.get_id(), HeadersAuthnResource(self)) |
|
82 | 82 | config.add_view( |
|
83 | 83 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
84 | 84 | attr='settings_get', |
|
85 | 85 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
86 | 86 | request_method='GET', |
|
87 | 87 | route_name='auth_home', |
|
88 | 88 | context=HeadersAuthnResource) |
|
89 | 89 | config.add_view( |
|
90 | 90 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
91 | 91 | attr='settings_post', |
|
92 | 92 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
93 | 93 | request_method='POST', |
|
94 | 94 | route_name='auth_home', |
|
95 | 95 | context=HeadersAuthnResource) |
|
96 | 96 | |
|
97 | 97 | def get_display_name(self): |
|
98 | 98 | return _('Headers') |
|
99 | 99 | |
|
100 | 100 | def get_settings_schema(self): |
|
101 | 101 | return HeadersSettingsSchema() |
|
102 | 102 | |
|
103 | 103 | @hybrid_property |
|
104 | 104 | def name(self): |
|
105 | 105 | return 'headers' |
|
106 | 106 | |
|
107 | 107 | @property |
|
108 | 108 | def is_headers_auth(self): |
|
109 | 109 | return True |
|
110 | 110 | |
|
111 | 111 | def use_fake_password(self): |
|
112 | 112 | return True |
|
113 | 113 | |
|
114 | 114 | def user_activation_state(self): |
|
115 | 115 | def_user_perms = User.get_default_user().AuthUser().permissions['global'] |
|
116 | 116 | return 'hg.extern_activate.auto' in def_user_perms |
|
117 | 117 | |
|
118 | 118 | def _clean_username(self, username): |
|
119 | 119 | # Removing realm and domain from username |
|
120 | 120 | username = username.split('@')[0] |
|
121 | 121 | username = username.rsplit('\\')[-1] |
|
122 | 122 | return username |
|
123 | 123 | |
|
124 | 124 | def _get_username(self, environ, settings): |
|
125 | 125 | username = None |
|
126 | 126 | environ = environ or {} |
|
127 | 127 | if not environ: |
|
128 |
log.debug('got empty environ: %s' |
|
|
128 | log.debug('got empty environ: %s', environ) | |
|
129 | 129 | |
|
130 | 130 | settings = settings or {} |
|
131 | 131 | if settings.get('header'): |
|
132 | 132 | header = settings.get('header') |
|
133 | 133 | username = environ.get(header) |
|
134 |
log.debug('extracted %s:%s' |
|
|
134 | log.debug('extracted %s:%s', header, username) | |
|
135 | 135 | |
|
136 | 136 | # fallback mode |
|
137 | 137 | if not username and settings.get('fallback_header'): |
|
138 | 138 | header = settings.get('fallback_header') |
|
139 | 139 | username = environ.get(header) |
|
140 |
log.debug('extracted %s:%s' |
|
|
140 | log.debug('extracted %s:%s', header, username) | |
|
141 | 141 | |
|
142 | 142 | if username and str2bool(settings.get('clean_username')): |
|
143 |
log.debug('Received username `%s` from headers' |
|
|
143 | log.debug('Received username `%s` from headers', username) | |
|
144 | 144 | username = self._clean_username(username) |
|
145 |
log.debug('New cleanup user is:%s' |
|
|
145 | log.debug('New cleanup user is:%s', username) | |
|
146 | 146 | return username |
|
147 | 147 | |
|
148 | 148 | def get_user(self, username=None, **kwargs): |
|
149 | 149 | """ |
|
150 | 150 | Helper method for user fetching in plugins, by default it's using |
|
151 | 151 | simple fetch by username, but this method can be custimized in plugins |
|
152 | 152 | eg. headers auth plugin to fetch user by environ params |
|
153 | 153 | :param username: username if given to fetch |
|
154 | 154 | :param kwargs: extra arguments needed for user fetching. |
|
155 | 155 | """ |
|
156 | 156 | environ = kwargs.get('environ') or {} |
|
157 | 157 | settings = kwargs.get('settings') or {} |
|
158 | 158 | username = self._get_username(environ, settings) |
|
159 | 159 | # we got the username, so use default method now |
|
160 | 160 | return super(RhodeCodeAuthPlugin, self).get_user(username) |
|
161 | 161 | |
|
162 | 162 | def auth(self, userobj, username, password, settings, **kwargs): |
|
163 | 163 | """ |
|
164 | 164 | Get's the headers_auth username (or email). It tries to get username |
|
165 | 165 | from REMOTE_USER if this plugin is enabled, if that fails |
|
166 | 166 | it tries to get username from HTTP_X_FORWARDED_USER if fallback header |
|
167 | 167 | is set. clean_username extracts the username from this data if it's |
|
168 | 168 | having @ in it. |
|
169 | 169 | Return None on failure. On success, return a dictionary of the form: |
|
170 | 170 | |
|
171 | 171 | see: RhodeCodeAuthPluginBase.auth_func_attrs |
|
172 | 172 | |
|
173 | 173 | :param userobj: |
|
174 | 174 | :param username: |
|
175 | 175 | :param password: |
|
176 | 176 | :param settings: |
|
177 | 177 | :param kwargs: |
|
178 | 178 | """ |
|
179 | 179 | environ = kwargs.get('environ') |
|
180 | 180 | if not environ: |
|
181 | 181 | log.debug('Empty environ data skipping...') |
|
182 | 182 | return None |
|
183 | 183 | |
|
184 | 184 | if not userobj: |
|
185 | 185 | userobj = self.get_user('', environ=environ, settings=settings) |
|
186 | 186 | |
|
187 | 187 | # we don't care passed username/password for headers auth plugins. |
|
188 | 188 | # only way to log in is using environ |
|
189 | 189 | username = None |
|
190 | 190 | if userobj: |
|
191 | 191 | username = getattr(userobj, 'username') |
|
192 | 192 | |
|
193 | 193 | if not username: |
|
194 | 194 | # we don't have any objects in DB user doesn't exist extract |
|
195 | 195 | # username from environ based on the settings |
|
196 | 196 | username = self._get_username(environ, settings) |
|
197 | 197 | |
|
198 | 198 | # if cannot fetch username, it's a no-go for this plugin to proceed |
|
199 | 199 | if not username: |
|
200 | 200 | return None |
|
201 | 201 | |
|
202 | 202 | # old attrs fetched from RhodeCode database |
|
203 | 203 | admin = getattr(userobj, 'admin', False) |
|
204 | 204 | active = getattr(userobj, 'active', True) |
|
205 | 205 | email = getattr(userobj, 'email', '') |
|
206 | 206 | firstname = getattr(userobj, 'firstname', '') |
|
207 | 207 | lastname = getattr(userobj, 'lastname', '') |
|
208 | 208 | extern_type = getattr(userobj, 'extern_type', '') |
|
209 | 209 | |
|
210 | 210 | user_attrs = { |
|
211 | 211 | 'username': username, |
|
212 | 212 | 'firstname': safe_unicode(firstname or username), |
|
213 | 213 | 'lastname': safe_unicode(lastname or ''), |
|
214 | 214 | 'groups': [], |
|
215 | 215 | 'user_group_sync': False, |
|
216 | 216 | 'email': email or '', |
|
217 | 217 | 'admin': admin or False, |
|
218 | 218 | 'active': active, |
|
219 | 219 | 'active_from_extern': True, |
|
220 | 220 | 'extern_name': username, |
|
221 | 221 | 'extern_type': extern_type, |
|
222 | 222 | } |
|
223 | 223 | |
|
224 |
log.info('user `%s` authenticated correctly' |
|
|
224 | log.info('user `%s` authenticated correctly', user_attrs['username']) | |
|
225 | 225 | return user_attrs |
@@ -1,167 +1,167 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode authentication plugin for Jasig CAS |
|
23 | 23 | http://www.jasig.org/cas |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import colander |
|
28 | 28 | import logging |
|
29 | 29 | import rhodecode |
|
30 | 30 | import urllib |
|
31 | 31 | import urllib2 |
|
32 | 32 | |
|
33 | 33 | from rhodecode.translation import _ |
|
34 | 34 | from rhodecode.authentication.base import ( |
|
35 | 35 | RhodeCodeExternalAuthPlugin, hybrid_property) |
|
36 | 36 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
37 | 37 | from rhodecode.authentication.routes import AuthnPluginResourceBase |
|
38 | 38 | from rhodecode.lib.colander_utils import strip_whitespace |
|
39 | 39 | from rhodecode.lib.utils2 import safe_unicode |
|
40 | 40 | from rhodecode.model.db import User |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def plugin_factory(plugin_id, *args, **kwds): |
|
46 | 46 | """ |
|
47 | 47 | Factory function that is called during plugin discovery. |
|
48 | 48 | It returns the plugin instance. |
|
49 | 49 | """ |
|
50 | 50 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
51 | 51 | return plugin |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class JasigCasAuthnResource(AuthnPluginResourceBase): |
|
55 | 55 | pass |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class JasigCasSettingsSchema(AuthnPluginSettingsSchemaBase): |
|
59 | 59 | service_url = colander.SchemaNode( |
|
60 | 60 | colander.String(), |
|
61 | 61 | default='https://domain.com/cas/v1/tickets', |
|
62 | 62 | description=_('The url of the Jasig CAS REST service'), |
|
63 | 63 | preparer=strip_whitespace, |
|
64 | 64 | title=_('URL'), |
|
65 | 65 | widget='string') |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin): |
|
69 | 69 | |
|
70 | 70 | def includeme(self, config): |
|
71 | 71 | config.add_authn_plugin(self) |
|
72 | 72 | config.add_authn_resource(self.get_id(), JasigCasAuthnResource(self)) |
|
73 | 73 | config.add_view( |
|
74 | 74 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
75 | 75 | attr='settings_get', |
|
76 | 76 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
77 | 77 | request_method='GET', |
|
78 | 78 | route_name='auth_home', |
|
79 | 79 | context=JasigCasAuthnResource) |
|
80 | 80 | config.add_view( |
|
81 | 81 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
82 | 82 | attr='settings_post', |
|
83 | 83 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
84 | 84 | request_method='POST', |
|
85 | 85 | route_name='auth_home', |
|
86 | 86 | context=JasigCasAuthnResource) |
|
87 | 87 | |
|
88 | 88 | def get_settings_schema(self): |
|
89 | 89 | return JasigCasSettingsSchema() |
|
90 | 90 | |
|
91 | 91 | def get_display_name(self): |
|
92 | 92 | return _('Jasig-CAS') |
|
93 | 93 | |
|
94 | 94 | @hybrid_property |
|
95 | 95 | def name(self): |
|
96 | 96 | return "jasig-cas" |
|
97 | 97 | |
|
98 | 98 | @property |
|
99 | 99 | def is_headers_auth(self): |
|
100 | 100 | return True |
|
101 | 101 | |
|
102 | 102 | def use_fake_password(self): |
|
103 | 103 | return True |
|
104 | 104 | |
|
105 | 105 | def user_activation_state(self): |
|
106 | 106 | def_user_perms = User.get_default_user().AuthUser().permissions['global'] |
|
107 | 107 | return 'hg.extern_activate.auto' in def_user_perms |
|
108 | 108 | |
|
109 | 109 | def auth(self, userobj, username, password, settings, **kwargs): |
|
110 | 110 | """ |
|
111 | 111 | Given a user object (which may be null), username, a plaintext password, |
|
112 | 112 | and a settings object (containing all the keys needed as listed in settings()), |
|
113 | 113 | authenticate this user's login attempt. |
|
114 | 114 | |
|
115 | 115 | Return None on failure. On success, return a dictionary of the form: |
|
116 | 116 | |
|
117 | 117 | see: RhodeCodeAuthPluginBase.auth_func_attrs |
|
118 | 118 | This is later validated for correctness |
|
119 | 119 | """ |
|
120 | 120 | if not username or not password: |
|
121 | 121 | log.debug('Empty username or password skipping...') |
|
122 | 122 | return None |
|
123 | 123 | |
|
124 | 124 | log.debug("Jasig CAS settings: %s", settings) |
|
125 | 125 | params = urllib.urlencode({'username': username, 'password': password}) |
|
126 | 126 | headers = {"Content-type": "application/x-www-form-urlencoded", |
|
127 | 127 | "Accept": "text/plain", |
|
128 | 128 | "User-Agent": "RhodeCode-auth-%s" % rhodecode.__version__} |
|
129 | 129 | url = settings["service_url"] |
|
130 | 130 | |
|
131 | 131 | log.debug("Sent Jasig CAS: \n%s", |
|
132 | 132 | {"url": url, "body": params, "headers": headers}) |
|
133 | 133 | request = urllib2.Request(url, params, headers) |
|
134 | 134 | try: |
|
135 | 135 | response = urllib2.urlopen(request) |
|
136 | 136 | except urllib2.HTTPError as e: |
|
137 |
log.debug("HTTPError when requesting Jasig CAS (status code: %d)" |
|
|
137 | log.debug("HTTPError when requesting Jasig CAS (status code: %d)", e.code) | |
|
138 | 138 | return None |
|
139 | 139 | except urllib2.URLError as e: |
|
140 |
log.debug("URLError when requesting Jasig CAS url: %s " |
|
|
140 | log.debug("URLError when requesting Jasig CAS url: %s ", url) | |
|
141 | 141 | return None |
|
142 | 142 | |
|
143 | 143 | # old attrs fetched from RhodeCode database |
|
144 | 144 | admin = getattr(userobj, 'admin', False) |
|
145 | 145 | active = getattr(userobj, 'active', True) |
|
146 | 146 | email = getattr(userobj, 'email', '') |
|
147 | 147 | username = getattr(userobj, 'username', username) |
|
148 | 148 | firstname = getattr(userobj, 'firstname', '') |
|
149 | 149 | lastname = getattr(userobj, 'lastname', '') |
|
150 | 150 | extern_type = getattr(userobj, 'extern_type', '') |
|
151 | 151 | |
|
152 | 152 | user_attrs = { |
|
153 | 153 | 'username': username, |
|
154 | 154 | 'firstname': safe_unicode(firstname or username), |
|
155 | 155 | 'lastname': safe_unicode(lastname or ''), |
|
156 | 156 | 'groups': [], |
|
157 | 157 | 'user_group_sync': False, |
|
158 | 158 | 'email': email or '', |
|
159 | 159 | 'admin': admin or False, |
|
160 | 160 | 'active': active, |
|
161 | 161 | 'active_from_extern': True, |
|
162 | 162 | 'extern_name': username, |
|
163 | 163 | 'extern_type': extern_type, |
|
164 | 164 | } |
|
165 | 165 | |
|
166 |
log.info('user `%s` authenticated correctly' |
|
|
166 | log.info('user `%s` authenticated correctly', user_attrs['username']) | |
|
167 | 167 | return user_attrs |
@@ -1,161 +1,161 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode authentication library for PAM |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import colander |
|
26 | 26 | import grp |
|
27 | 27 | import logging |
|
28 | 28 | import pam |
|
29 | 29 | import pwd |
|
30 | 30 | import re |
|
31 | 31 | import socket |
|
32 | 32 | |
|
33 | 33 | from rhodecode.translation import _ |
|
34 | 34 | from rhodecode.authentication.base import ( |
|
35 | 35 | RhodeCodeExternalAuthPlugin, hybrid_property) |
|
36 | 36 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
37 | 37 | from rhodecode.authentication.routes import AuthnPluginResourceBase |
|
38 | 38 | from rhodecode.lib.colander_utils import strip_whitespace |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | def plugin_factory(plugin_id, *args, **kwds): |
|
44 | 44 | """ |
|
45 | 45 | Factory function that is called during plugin discovery. |
|
46 | 46 | It returns the plugin instance. |
|
47 | 47 | """ |
|
48 | 48 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
49 | 49 | return plugin |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | class PamAuthnResource(AuthnPluginResourceBase): |
|
53 | 53 | pass |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | class PamSettingsSchema(AuthnPluginSettingsSchemaBase): |
|
57 | 57 | service = colander.SchemaNode( |
|
58 | 58 | colander.String(), |
|
59 | 59 | default='login', |
|
60 | 60 | description=_('PAM service name to use for authentication.'), |
|
61 | 61 | preparer=strip_whitespace, |
|
62 | 62 | title=_('PAM service name'), |
|
63 | 63 | widget='string') |
|
64 | 64 | gecos = colander.SchemaNode( |
|
65 | 65 | colander.String(), |
|
66 | 66 | default='(?P<last_name>.+),\s*(?P<first_name>\w+)', |
|
67 | 67 | description=_('Regular expression for extracting user name/email etc. ' |
|
68 | 68 | 'from Unix userinfo.'), |
|
69 | 69 | preparer=strip_whitespace, |
|
70 | 70 | title=_('Gecos Regex'), |
|
71 | 71 | widget='string') |
|
72 | 72 | |
|
73 | 73 | |
|
74 | 74 | class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin): |
|
75 | 75 | # PAM authentication can be slow. Repository operations involve a lot of |
|
76 | 76 | # auth calls. Little caching helps speedup push/pull operations significantly |
|
77 | 77 | AUTH_CACHE_TTL = 4 |
|
78 | 78 | |
|
79 | 79 | def includeme(self, config): |
|
80 | 80 | config.add_authn_plugin(self) |
|
81 | 81 | config.add_authn_resource(self.get_id(), PamAuthnResource(self)) |
|
82 | 82 | config.add_view( |
|
83 | 83 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
84 | 84 | attr='settings_get', |
|
85 | 85 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
86 | 86 | request_method='GET', |
|
87 | 87 | route_name='auth_home', |
|
88 | 88 | context=PamAuthnResource) |
|
89 | 89 | config.add_view( |
|
90 | 90 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
91 | 91 | attr='settings_post', |
|
92 | 92 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
93 | 93 | request_method='POST', |
|
94 | 94 | route_name='auth_home', |
|
95 | 95 | context=PamAuthnResource) |
|
96 | 96 | |
|
97 | 97 | def get_display_name(self): |
|
98 | 98 | return _('PAM') |
|
99 | 99 | |
|
100 | 100 | @hybrid_property |
|
101 | 101 | def name(self): |
|
102 | 102 | return "pam" |
|
103 | 103 | |
|
104 | 104 | def get_settings_schema(self): |
|
105 | 105 | return PamSettingsSchema() |
|
106 | 106 | |
|
107 | 107 | def use_fake_password(self): |
|
108 | 108 | return True |
|
109 | 109 | |
|
110 | 110 | def auth(self, userobj, username, password, settings, **kwargs): |
|
111 | 111 | if not username or not password: |
|
112 | 112 | log.debug('Empty username or password skipping...') |
|
113 | 113 | return None |
|
114 | 114 | _pam = pam.pam() |
|
115 | 115 | auth_result = _pam.authenticate(username, password, settings["service"]) |
|
116 | 116 | |
|
117 | 117 | if not auth_result: |
|
118 |
log.error("PAM was unable to authenticate user: %s" |
|
|
118 | log.error("PAM was unable to authenticate user: %s", username) | |
|
119 | 119 | return None |
|
120 | 120 | |
|
121 |
log.debug('Got PAM response %s' |
|
|
121 | log.debug('Got PAM response %s', auth_result) | |
|
122 | 122 | |
|
123 | 123 | # old attrs fetched from RhodeCode database |
|
124 | 124 | default_email = "%s@%s" % (username, socket.gethostname()) |
|
125 | 125 | admin = getattr(userobj, 'admin', False) |
|
126 | 126 | active = getattr(userobj, 'active', True) |
|
127 | 127 | email = getattr(userobj, 'email', '') or default_email |
|
128 | 128 | username = getattr(userobj, 'username', username) |
|
129 | 129 | firstname = getattr(userobj, 'firstname', '') |
|
130 | 130 | lastname = getattr(userobj, 'lastname', '') |
|
131 | 131 | extern_type = getattr(userobj, 'extern_type', '') |
|
132 | 132 | |
|
133 | 133 | user_attrs = { |
|
134 | 134 | 'username': username, |
|
135 | 135 | 'firstname': firstname, |
|
136 | 136 | 'lastname': lastname, |
|
137 | 137 | 'groups': [g.gr_name for g in grp.getgrall() |
|
138 | 138 | if username in g.gr_mem], |
|
139 | 139 | 'user_group_sync': True, |
|
140 | 140 | 'email': email, |
|
141 | 141 | 'admin': admin, |
|
142 | 142 | 'active': active, |
|
143 | 143 | 'active_from_extern': None, |
|
144 | 144 | 'extern_name': username, |
|
145 | 145 | 'extern_type': extern_type, |
|
146 | 146 | } |
|
147 | 147 | |
|
148 | 148 | try: |
|
149 | 149 | user_data = pwd.getpwnam(username) |
|
150 | 150 | regex = settings["gecos"] |
|
151 | 151 | match = re.search(regex, user_data.pw_gecos) |
|
152 | 152 | if match: |
|
153 | 153 | user_attrs["firstname"] = match.group('first_name') |
|
154 | 154 | user_attrs["lastname"] = match.group('last_name') |
|
155 | 155 | except Exception: |
|
156 | 156 | log.warning("Cannot extract additional info for PAM user") |
|
157 | 157 | pass |
|
158 | 158 | |
|
159 | 159 | log.debug("pamuser: %s", user_attrs) |
|
160 |
log.info('user `%s` authenticated correctly' |
|
|
160 | log.info('user `%s` authenticated correctly', user_attrs['username']) | |
|
161 | 161 | return user_attrs |
@@ -1,143 +1,143 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode authentication plugin for built in internal auth |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | from rhodecode.translation import _ |
|
28 | 28 | |
|
29 | 29 | from rhodecode.authentication.base import RhodeCodeAuthPluginBase, hybrid_property |
|
30 | 30 | from rhodecode.authentication.routes import AuthnPluginResourceBase |
|
31 | 31 | from rhodecode.lib.utils2 import safe_str |
|
32 | 32 | from rhodecode.model.db import User |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def plugin_factory(plugin_id, *args, **kwds): |
|
38 | 38 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
39 | 39 | return plugin |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class RhodecodeAuthnResource(AuthnPluginResourceBase): |
|
43 | 43 | pass |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class RhodeCodeAuthPlugin(RhodeCodeAuthPluginBase): |
|
47 | 47 | |
|
48 | 48 | def includeme(self, config): |
|
49 | 49 | config.add_authn_plugin(self) |
|
50 | 50 | config.add_authn_resource(self.get_id(), RhodecodeAuthnResource(self)) |
|
51 | 51 | config.add_view( |
|
52 | 52 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
53 | 53 | attr='settings_get', |
|
54 | 54 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
55 | 55 | request_method='GET', |
|
56 | 56 | route_name='auth_home', |
|
57 | 57 | context=RhodecodeAuthnResource) |
|
58 | 58 | config.add_view( |
|
59 | 59 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
60 | 60 | attr='settings_post', |
|
61 | 61 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
62 | 62 | request_method='POST', |
|
63 | 63 | route_name='auth_home', |
|
64 | 64 | context=RhodecodeAuthnResource) |
|
65 | 65 | |
|
66 | 66 | def get_display_name(self): |
|
67 | 67 | return _('Rhodecode') |
|
68 | 68 | |
|
69 | 69 | @hybrid_property |
|
70 | 70 | def name(self): |
|
71 | 71 | return "rhodecode" |
|
72 | 72 | |
|
73 | 73 | def user_activation_state(self): |
|
74 | 74 | def_user_perms = User.get_default_user().AuthUser().permissions['global'] |
|
75 | 75 | return 'hg.register.auto_activate' in def_user_perms |
|
76 | 76 | |
|
77 | 77 | def allows_authentication_from( |
|
78 | 78 | self, user, allows_non_existing_user=True, |
|
79 | 79 | allowed_auth_plugins=None, allowed_auth_sources=None): |
|
80 | 80 | """ |
|
81 | 81 | Custom method for this auth that doesn't accept non existing users. |
|
82 | 82 | We know that user exists in our database. |
|
83 | 83 | """ |
|
84 | 84 | allows_non_existing_user = False |
|
85 | 85 | return super(RhodeCodeAuthPlugin, self).allows_authentication_from( |
|
86 | 86 | user, allows_non_existing_user=allows_non_existing_user) |
|
87 | 87 | |
|
88 | 88 | def auth(self, userobj, username, password, settings, **kwargs): |
|
89 | 89 | if not userobj: |
|
90 |
log.debug('userobj was:%s skipping' |
|
|
90 | log.debug('userobj was:%s skipping', userobj) | |
|
91 | 91 | return None |
|
92 | 92 | if userobj.extern_type != self.name: |
|
93 | 93 | log.warning( |
|
94 |
"userobj:%s extern_type mismatch got:`%s` expected:`%s`" |
|
|
95 |
|
|
|
94 | "userobj:%s extern_type mismatch got:`%s` expected:`%s`", | |
|
95 | userobj, userobj.extern_type, self.name) | |
|
96 | 96 | return None |
|
97 | 97 | |
|
98 | 98 | user_attrs = { |
|
99 | 99 | "username": userobj.username, |
|
100 | 100 | "firstname": userobj.firstname, |
|
101 | 101 | "lastname": userobj.lastname, |
|
102 | 102 | "groups": [], |
|
103 | 103 | 'user_group_sync': False, |
|
104 | 104 | "email": userobj.email, |
|
105 | 105 | "admin": userobj.admin, |
|
106 | 106 | "active": userobj.active, |
|
107 | 107 | "active_from_extern": userobj.active, |
|
108 | 108 | "extern_name": userobj.user_id, |
|
109 | 109 | "extern_type": userobj.extern_type, |
|
110 | 110 | } |
|
111 | 111 | |
|
112 |
log.debug("User attributes:%s" |
|
|
112 | log.debug("User attributes:%s", user_attrs) | |
|
113 | 113 | if userobj.active: |
|
114 | 114 | from rhodecode.lib import auth |
|
115 | 115 | crypto_backend = auth.crypto_backend() |
|
116 | 116 | password_encoded = safe_str(password) |
|
117 | 117 | password_match, new_hash = crypto_backend.hash_check_with_upgrade( |
|
118 | 118 | password_encoded, userobj.password or '') |
|
119 | 119 | |
|
120 | 120 | if password_match and new_hash: |
|
121 | 121 | log.debug('user %s properly authenticated, but ' |
|
122 | 122 | 'requires hash change to bcrypt', userobj) |
|
123 | 123 | # if password match, and we use OLD deprecated hash, |
|
124 | 124 | # we should migrate this user hash password to the new hash |
|
125 | 125 | # we store the new returned by hash_check_with_upgrade function |
|
126 | 126 | user_attrs['_hash_migrate'] = new_hash |
|
127 | 127 | |
|
128 | 128 | if userobj.username == User.DEFAULT_USER and userobj.active: |
|
129 | 129 | log.info( |
|
130 | 130 | 'user `%s` authenticated correctly as anonymous user', userobj.username) |
|
131 | 131 | return user_attrs |
|
132 | 132 | |
|
133 | 133 | elif userobj.username == username and password_match: |
|
134 | 134 | log.info('user `%s` authenticated correctly', userobj.username) |
|
135 | 135 | return user_attrs |
|
136 | 136 | log.warn("user `%s` used a wrong password when " |
|
137 | 137 | "authenticating on this plugin", userobj.username) |
|
138 | 138 | return None |
|
139 | 139 | else: |
|
140 | 140 | log.warning( |
|
141 | 141 | 'user `%s` failed to authenticate via %s, reason: account not ' |
|
142 | 142 | 'active.', username, self.name) |
|
143 | 143 | return None |
@@ -1,147 +1,147 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode authentication token plugin for built in internal auth |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | from rhodecode.translation import _ |
|
28 | 28 | from rhodecode.authentication.base import ( |
|
29 | 29 | RhodeCodeAuthPluginBase, VCS_TYPE, hybrid_property) |
|
30 | 30 | from rhodecode.authentication.routes import AuthnPluginResourceBase |
|
31 | 31 | from rhodecode.model.db import User, UserApiKeys, Repository |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def plugin_factory(plugin_id, *args, **kwds): |
|
38 | 38 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
39 | 39 | return plugin |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class RhodecodeAuthnResource(AuthnPluginResourceBase): |
|
43 | 43 | pass |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class RhodeCodeAuthPlugin(RhodeCodeAuthPluginBase): |
|
47 | 47 | """ |
|
48 | 48 | Enables usage of authentication tokens for vcs operations. |
|
49 | 49 | """ |
|
50 | 50 | |
|
51 | 51 | def includeme(self, config): |
|
52 | 52 | config.add_authn_plugin(self) |
|
53 | 53 | config.add_authn_resource(self.get_id(), RhodecodeAuthnResource(self)) |
|
54 | 54 | config.add_view( |
|
55 | 55 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
56 | 56 | attr='settings_get', |
|
57 | 57 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
58 | 58 | request_method='GET', |
|
59 | 59 | route_name='auth_home', |
|
60 | 60 | context=RhodecodeAuthnResource) |
|
61 | 61 | config.add_view( |
|
62 | 62 | 'rhodecode.authentication.views.AuthnPluginViewBase', |
|
63 | 63 | attr='settings_post', |
|
64 | 64 | renderer='rhodecode:templates/admin/auth/plugin_settings.mako', |
|
65 | 65 | request_method='POST', |
|
66 | 66 | route_name='auth_home', |
|
67 | 67 | context=RhodecodeAuthnResource) |
|
68 | 68 | |
|
69 | 69 | def get_display_name(self): |
|
70 | 70 | return _('Rhodecode Token Auth') |
|
71 | 71 | |
|
72 | 72 | @hybrid_property |
|
73 | 73 | def name(self): |
|
74 | 74 | return "authtoken" |
|
75 | 75 | |
|
76 | 76 | def user_activation_state(self): |
|
77 | 77 | def_user_perms = User.get_default_user().AuthUser().permissions['global'] |
|
78 | 78 | return 'hg.register.auto_activate' in def_user_perms |
|
79 | 79 | |
|
80 | 80 | def allows_authentication_from( |
|
81 | 81 | self, user, allows_non_existing_user=True, |
|
82 | 82 | allowed_auth_plugins=None, allowed_auth_sources=None): |
|
83 | 83 | """ |
|
84 | 84 | Custom method for this auth that doesn't accept empty users. And also |
|
85 | 85 | allows users from all other active plugins to use it and also |
|
86 | 86 | authenticate against it. But only via vcs mode |
|
87 | 87 | """ |
|
88 | 88 | from rhodecode.authentication.base import get_authn_registry |
|
89 | 89 | authn_registry = get_authn_registry() |
|
90 | 90 | |
|
91 | 91 | active_plugins = set( |
|
92 | 92 | [x.name for x in authn_registry.get_plugins_for_authentication()]) |
|
93 | 93 | active_plugins.discard(self.name) |
|
94 | 94 | |
|
95 | 95 | allowed_auth_plugins = [self.name] + list(active_plugins) |
|
96 | 96 | # only for vcs operations |
|
97 | 97 | allowed_auth_sources = [VCS_TYPE] |
|
98 | 98 | |
|
99 | 99 | return super(RhodeCodeAuthPlugin, self).allows_authentication_from( |
|
100 | 100 | user, allows_non_existing_user=False, |
|
101 | 101 | allowed_auth_plugins=allowed_auth_plugins, |
|
102 | 102 | allowed_auth_sources=allowed_auth_sources) |
|
103 | 103 | |
|
104 | 104 | def auth(self, userobj, username, password, settings, **kwargs): |
|
105 | 105 | if not userobj: |
|
106 |
log.debug('userobj was:%s skipping' |
|
|
106 | log.debug('userobj was:%s skipping', userobj) | |
|
107 | 107 | return None |
|
108 | 108 | |
|
109 | 109 | user_attrs = { |
|
110 | 110 | "username": userobj.username, |
|
111 | 111 | "firstname": userobj.firstname, |
|
112 | 112 | "lastname": userobj.lastname, |
|
113 | 113 | "groups": [], |
|
114 | 114 | 'user_group_sync': False, |
|
115 | 115 | "email": userobj.email, |
|
116 | 116 | "admin": userobj.admin, |
|
117 | 117 | "active": userobj.active, |
|
118 | 118 | "active_from_extern": userobj.active, |
|
119 | 119 | "extern_name": userobj.user_id, |
|
120 | 120 | "extern_type": userobj.extern_type, |
|
121 | 121 | } |
|
122 | 122 | |
|
123 | 123 | log.debug('Authenticating user with args %s', user_attrs) |
|
124 | 124 | if userobj.active: |
|
125 | 125 | # calling context repo for token scopes |
|
126 | 126 | scope_repo_id = None |
|
127 | 127 | if self.acl_repo_name: |
|
128 | 128 | repo = Repository.get_by_repo_name(self.acl_repo_name) |
|
129 | 129 | scope_repo_id = repo.repo_id if repo else None |
|
130 | 130 | |
|
131 | 131 | token_match = userobj.authenticate_by_token( |
|
132 | 132 | password, roles=[UserApiKeys.ROLE_VCS], |
|
133 | 133 | scope_repo_id=scope_repo_id) |
|
134 | 134 | |
|
135 | 135 | if userobj.username == username and token_match: |
|
136 | 136 | log.info( |
|
137 | 137 | 'user `%s` successfully authenticated via %s', |
|
138 | 138 | user_attrs['username'], self.name) |
|
139 | 139 | return user_attrs |
|
140 | 140 | log.warn( |
|
141 | 141 | 'user `%s` failed to authenticate via %s, reason: bad or ' |
|
142 | 142 | 'inactive token.', username, self.name) |
|
143 | 143 | else: |
|
144 | 144 | log.warning( |
|
145 | 145 | 'user `%s` failed to authenticate via %s, reason: account not ' |
|
146 | 146 | 'active.', username, self.name) |
|
147 | 147 | return None |
@@ -1,356 +1,356 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import collections |
|
20 | 20 | import logging |
|
21 | 21 | import datetime |
|
22 | 22 | |
|
23 | 23 | from rhodecode.translation import lazy_ugettext |
|
24 | 24 | from rhodecode.model.db import User, Repository, Session |
|
25 | 25 | from rhodecode.events.base import RhodeCodeIntegrationEvent |
|
26 | 26 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
27 | 27 | |
|
28 | 28 | log = logging.getLogger(__name__) |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def _commits_as_dict(event, commit_ids, repos): |
|
32 | 32 | """ |
|
33 | 33 | Helper function to serialize commit_ids |
|
34 | 34 | |
|
35 | 35 | :param event: class calling this method |
|
36 | 36 | :param commit_ids: commits to get |
|
37 | 37 | :param repos: list of repos to check |
|
38 | 38 | """ |
|
39 | 39 | from rhodecode.lib.utils2 import extract_mentioned_users |
|
40 | 40 | from rhodecode.lib.helpers import ( |
|
41 | 41 | urlify_commit_message, process_patterns, chop_at_smart) |
|
42 | 42 | from rhodecode.model.repo import RepoModel |
|
43 | 43 | |
|
44 | 44 | if not repos: |
|
45 | 45 | raise Exception('no repo defined') |
|
46 | 46 | |
|
47 | 47 | if not isinstance(repos, (tuple, list)): |
|
48 | 48 | repos = [repos] |
|
49 | 49 | |
|
50 | 50 | if not commit_ids: |
|
51 | 51 | return [] |
|
52 | 52 | |
|
53 | 53 | needed_commits = list(commit_ids) |
|
54 | 54 | |
|
55 | 55 | commits = [] |
|
56 | 56 | reviewers = [] |
|
57 | 57 | for repo in repos: |
|
58 | 58 | if not needed_commits: |
|
59 | 59 | return commits # return early if we have the commits we need |
|
60 | 60 | |
|
61 | 61 | vcs_repo = repo.scm_instance(cache=False) |
|
62 | 62 | |
|
63 | 63 | try: |
|
64 | 64 | # use copy of needed_commits since we modify it while iterating |
|
65 | 65 | for commit_id in list(needed_commits): |
|
66 | 66 | if commit_id.startswith('tag=>'): |
|
67 | 67 | raw_id = commit_id[5:] |
|
68 | 68 | cs_data = { |
|
69 | 69 | 'raw_id': commit_id, 'short_id': commit_id, |
|
70 | 70 | 'branch': None, |
|
71 | 71 | 'git_ref_change': 'tag_add', |
|
72 | 72 | 'message': 'Added new tag {}'.format(raw_id), |
|
73 | 73 | 'author': event.actor.full_contact, |
|
74 | 74 | 'date': datetime.datetime.now(), |
|
75 | 75 | 'refs': { |
|
76 | 76 | 'branches': [], |
|
77 | 77 | 'bookmarks': [], |
|
78 | 78 | 'tags': [] |
|
79 | 79 | } |
|
80 | 80 | } |
|
81 | 81 | commits.append(cs_data) |
|
82 | 82 | |
|
83 | 83 | elif commit_id.startswith('delete_branch=>'): |
|
84 | 84 | raw_id = commit_id[15:] |
|
85 | 85 | cs_data = { |
|
86 | 86 | 'raw_id': commit_id, 'short_id': commit_id, |
|
87 | 87 | 'branch': None, |
|
88 | 88 | 'git_ref_change': 'branch_delete', |
|
89 | 89 | 'message': 'Deleted branch {}'.format(raw_id), |
|
90 | 90 | 'author': event.actor.full_contact, |
|
91 | 91 | 'date': datetime.datetime.now(), |
|
92 | 92 | 'refs': { |
|
93 | 93 | 'branches': [], |
|
94 | 94 | 'bookmarks': [], |
|
95 | 95 | 'tags': [] |
|
96 | 96 | } |
|
97 | 97 | } |
|
98 | 98 | commits.append(cs_data) |
|
99 | 99 | |
|
100 | 100 | else: |
|
101 | 101 | try: |
|
102 | 102 | cs = vcs_repo.get_changeset(commit_id) |
|
103 | 103 | except CommitDoesNotExistError: |
|
104 | 104 | continue # maybe its in next repo |
|
105 | 105 | |
|
106 | 106 | cs_data = cs.__json__() |
|
107 | 107 | cs_data['refs'] = cs._get_refs() |
|
108 | 108 | |
|
109 | 109 | cs_data['mentions'] = extract_mentioned_users(cs_data['message']) |
|
110 | 110 | cs_data['reviewers'] = reviewers |
|
111 | 111 | cs_data['url'] = RepoModel().get_commit_url( |
|
112 | 112 | repo, cs_data['raw_id'], request=event.request) |
|
113 | 113 | cs_data['permalink_url'] = RepoModel().get_commit_url( |
|
114 | 114 | repo, cs_data['raw_id'], request=event.request, |
|
115 | 115 | permalink=True) |
|
116 | 116 | urlified_message, issues_data = process_patterns( |
|
117 | 117 | cs_data['message'], repo.repo_name) |
|
118 | 118 | cs_data['issues'] = issues_data |
|
119 | 119 | cs_data['message_html'] = urlify_commit_message( |
|
120 | 120 | cs_data['message'], repo.repo_name) |
|
121 | 121 | cs_data['message_html_title'] = chop_at_smart( |
|
122 | 122 | cs_data['message'], '\n', suffix_if_chopped='...') |
|
123 | 123 | commits.append(cs_data) |
|
124 | 124 | |
|
125 | 125 | needed_commits.remove(commit_id) |
|
126 | 126 | |
|
127 | 127 | except Exception: |
|
128 | 128 | log.exception('Failed to extract commits data') |
|
129 | 129 | # we don't send any commits when crash happens, only full list |
|
130 | 130 | # matters we short circuit then. |
|
131 | 131 | return [] |
|
132 | 132 | |
|
133 | 133 | missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits) |
|
134 | 134 | if missing_commits: |
|
135 | 135 | log.error('Inconsistent repository state. ' |
|
136 |
'Missing commits: %s' |
|
|
136 | 'Missing commits: %s', ', '.join(missing_commits)) | |
|
137 | 137 | |
|
138 | 138 | return commits |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | def _issues_as_dict(commits): |
|
142 | 142 | """ Helper function to serialize issues from commits """ |
|
143 | 143 | issues = {} |
|
144 | 144 | for commit in commits: |
|
145 | 145 | for issue in commit['issues']: |
|
146 | 146 | issues[issue['id']] = issue |
|
147 | 147 | return issues |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | class RepoEvent(RhodeCodeIntegrationEvent): |
|
151 | 151 | """ |
|
152 | 152 | Base class for events acting on a repository. |
|
153 | 153 | |
|
154 | 154 | :param repo: a :class:`Repository` instance |
|
155 | 155 | """ |
|
156 | 156 | |
|
157 | 157 | def __init__(self, repo): |
|
158 | 158 | super(RepoEvent, self).__init__() |
|
159 | 159 | self.repo = repo |
|
160 | 160 | |
|
161 | 161 | def as_dict(self): |
|
162 | 162 | from rhodecode.model.repo import RepoModel |
|
163 | 163 | data = super(RepoEvent, self).as_dict() |
|
164 | 164 | |
|
165 | 165 | extra_fields = collections.OrderedDict() |
|
166 | 166 | for field in self.repo.extra_fields: |
|
167 | 167 | extra_fields[field.field_key] = field.field_value |
|
168 | 168 | |
|
169 | 169 | data.update({ |
|
170 | 170 | 'repo': { |
|
171 | 171 | 'repo_id': self.repo.repo_id, |
|
172 | 172 | 'repo_name': self.repo.repo_name, |
|
173 | 173 | 'repo_type': self.repo.repo_type, |
|
174 | 174 | 'url': RepoModel().get_url( |
|
175 | 175 | self.repo, request=self.request), |
|
176 | 176 | 'permalink_url': RepoModel().get_url( |
|
177 | 177 | self.repo, request=self.request, permalink=True), |
|
178 | 178 | 'extra_fields': extra_fields |
|
179 | 179 | } |
|
180 | 180 | }) |
|
181 | 181 | return data |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | class RepoPreCreateEvent(RepoEvent): |
|
185 | 185 | """ |
|
186 | 186 | An instance of this class is emitted as an :term:`event` before a repo is |
|
187 | 187 | created. |
|
188 | 188 | """ |
|
189 | 189 | name = 'repo-pre-create' |
|
190 | 190 | display_name = lazy_ugettext('repository pre create') |
|
191 | 191 | |
|
192 | 192 | |
|
193 | 193 | class RepoCreateEvent(RepoEvent): |
|
194 | 194 | """ |
|
195 | 195 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
196 | 196 | created. |
|
197 | 197 | """ |
|
198 | 198 | name = 'repo-create' |
|
199 | 199 | display_name = lazy_ugettext('repository created') |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | class RepoPreDeleteEvent(RepoEvent): |
|
203 | 203 | """ |
|
204 | 204 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
205 | 205 | created. |
|
206 | 206 | """ |
|
207 | 207 | name = 'repo-pre-delete' |
|
208 | 208 | display_name = lazy_ugettext('repository pre delete') |
|
209 | 209 | |
|
210 | 210 | |
|
211 | 211 | class RepoDeleteEvent(RepoEvent): |
|
212 | 212 | """ |
|
213 | 213 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
214 | 214 | created. |
|
215 | 215 | """ |
|
216 | 216 | name = 'repo-delete' |
|
217 | 217 | display_name = lazy_ugettext('repository deleted') |
|
218 | 218 | |
|
219 | 219 | |
|
220 | 220 | class RepoVCSEvent(RepoEvent): |
|
221 | 221 | """ |
|
222 | 222 | Base class for events triggered by the VCS |
|
223 | 223 | """ |
|
224 | 224 | def __init__(self, repo_name, extras): |
|
225 | 225 | self.repo = Repository.get_by_repo_name(repo_name) |
|
226 | 226 | if not self.repo: |
|
227 | 227 | raise Exception('repo by this name %s does not exist' % repo_name) |
|
228 | 228 | self.extras = extras |
|
229 | 229 | super(RepoVCSEvent, self).__init__(self.repo) |
|
230 | 230 | |
|
231 | 231 | @property |
|
232 | 232 | def actor(self): |
|
233 | 233 | if self.extras.get('username'): |
|
234 | 234 | return User.get_by_username(self.extras['username']) |
|
235 | 235 | |
|
236 | 236 | @property |
|
237 | 237 | def actor_ip(self): |
|
238 | 238 | if self.extras.get('ip'): |
|
239 | 239 | return self.extras['ip'] |
|
240 | 240 | |
|
241 | 241 | @property |
|
242 | 242 | def server_url(self): |
|
243 | 243 | if self.extras.get('server_url'): |
|
244 | 244 | return self.extras['server_url'] |
|
245 | 245 | |
|
246 | 246 | @property |
|
247 | 247 | def request(self): |
|
248 | 248 | return self.extras.get('request') or self.get_request() |
|
249 | 249 | |
|
250 | 250 | |
|
251 | 251 | class RepoPrePullEvent(RepoVCSEvent): |
|
252 | 252 | """ |
|
253 | 253 | An instance of this class is emitted as an :term:`event` before commits |
|
254 | 254 | are pulled from a repo. |
|
255 | 255 | """ |
|
256 | 256 | name = 'repo-pre-pull' |
|
257 | 257 | display_name = lazy_ugettext('repository pre pull') |
|
258 | 258 | |
|
259 | 259 | |
|
260 | 260 | class RepoPullEvent(RepoVCSEvent): |
|
261 | 261 | """ |
|
262 | 262 | An instance of this class is emitted as an :term:`event` after commits |
|
263 | 263 | are pulled from a repo. |
|
264 | 264 | """ |
|
265 | 265 | name = 'repo-pull' |
|
266 | 266 | display_name = lazy_ugettext('repository pull') |
|
267 | 267 | |
|
268 | 268 | |
|
269 | 269 | class RepoPrePushEvent(RepoVCSEvent): |
|
270 | 270 | """ |
|
271 | 271 | An instance of this class is emitted as an :term:`event` before commits |
|
272 | 272 | are pushed to a repo. |
|
273 | 273 | """ |
|
274 | 274 | name = 'repo-pre-push' |
|
275 | 275 | display_name = lazy_ugettext('repository pre push') |
|
276 | 276 | |
|
277 | 277 | |
|
278 | 278 | class RepoPushEvent(RepoVCSEvent): |
|
279 | 279 | """ |
|
280 | 280 | An instance of this class is emitted as an :term:`event` after commits |
|
281 | 281 | are pushed to a repo. |
|
282 | 282 | |
|
283 | 283 | :param extras: (optional) dict of data from proxied VCS actions |
|
284 | 284 | """ |
|
285 | 285 | name = 'repo-push' |
|
286 | 286 | display_name = lazy_ugettext('repository push') |
|
287 | 287 | |
|
288 | 288 | def __init__(self, repo_name, pushed_commit_ids, extras): |
|
289 | 289 | super(RepoPushEvent, self).__init__(repo_name, extras) |
|
290 | 290 | self.pushed_commit_ids = pushed_commit_ids |
|
291 | 291 | self.new_refs = extras.new_refs |
|
292 | 292 | |
|
293 | 293 | def as_dict(self): |
|
294 | 294 | data = super(RepoPushEvent, self).as_dict() |
|
295 | 295 | |
|
296 | 296 | def branch_url(branch_name): |
|
297 | 297 | return '{}/changelog?branch={}'.format( |
|
298 | 298 | data['repo']['url'], branch_name) |
|
299 | 299 | |
|
300 | 300 | def tag_url(tag_name): |
|
301 | 301 | return '{}/files/{}/'.format( |
|
302 | 302 | data['repo']['url'], tag_name) |
|
303 | 303 | |
|
304 | 304 | commits = _commits_as_dict( |
|
305 | 305 | self, commit_ids=self.pushed_commit_ids, repos=[self.repo]) |
|
306 | 306 | |
|
307 | 307 | last_branch = None |
|
308 | 308 | for commit in reversed(commits): |
|
309 | 309 | commit['branch'] = commit['branch'] or last_branch |
|
310 | 310 | last_branch = commit['branch'] |
|
311 | 311 | issues = _issues_as_dict(commits) |
|
312 | 312 | |
|
313 | 313 | branches = set() |
|
314 | 314 | tags = set() |
|
315 | 315 | for commit in commits: |
|
316 | 316 | if commit['refs']['tags']: |
|
317 | 317 | for tag in commit['refs']['tags']: |
|
318 | 318 | tags.add(tag) |
|
319 | 319 | if commit['branch']: |
|
320 | 320 | branches.add(commit['branch']) |
|
321 | 321 | |
|
322 | 322 | # maybe we have branches in new_refs ? |
|
323 | 323 | try: |
|
324 | 324 | branches = branches.union(set(self.new_refs['branches'])) |
|
325 | 325 | except Exception: |
|
326 | 326 | pass |
|
327 | 327 | |
|
328 | 328 | branches = [ |
|
329 | 329 | { |
|
330 | 330 | 'name': branch, |
|
331 | 331 | 'url': branch_url(branch) |
|
332 | 332 | } |
|
333 | 333 | for branch in branches |
|
334 | 334 | ] |
|
335 | 335 | |
|
336 | 336 | # maybe we have branches in new_refs ? |
|
337 | 337 | try: |
|
338 | 338 | tags = tags.union(set(self.new_refs['tags'])) |
|
339 | 339 | except Exception: |
|
340 | 340 | pass |
|
341 | 341 | |
|
342 | 342 | tags = [ |
|
343 | 343 | { |
|
344 | 344 | 'name': tag, |
|
345 | 345 | 'url': tag_url(tag) |
|
346 | 346 | } |
|
347 | 347 | for tag in tags |
|
348 | 348 | ] |
|
349 | 349 | |
|
350 | 350 | data['push'] = { |
|
351 | 351 | 'commits': commits, |
|
352 | 352 | 'issues': issues, |
|
353 | 353 | 'branches': branches, |
|
354 | 354 | 'tags': tags, |
|
355 | 355 | } |
|
356 | 356 | return data |
@@ -1,74 +1,74 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import sys |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from rhodecode.integrations.registry import IntegrationTypeRegistry |
|
24 | 24 | from rhodecode.integrations.types import webhook, slack, hipchat, email, base |
|
25 | 25 | from rhodecode.lib.exc_tracking import store_exception |
|
26 | 26 | |
|
27 | 27 | log = logging.getLogger(__name__) |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | # TODO: dan: This is currently global until we figure out what to do about |
|
31 | 31 | # VCS's not having a pyramid context - move it to pyramid app configuration |
|
32 | 32 | # includeme level later to allow per instance integration setup |
|
33 | 33 | integration_type_registry = IntegrationTypeRegistry() |
|
34 | 34 | |
|
35 | 35 | integration_type_registry.register_integration_type( |
|
36 | 36 | webhook.WebhookIntegrationType) |
|
37 | 37 | integration_type_registry.register_integration_type( |
|
38 | 38 | slack.SlackIntegrationType) |
|
39 | 39 | integration_type_registry.register_integration_type( |
|
40 | 40 | hipchat.HipchatIntegrationType) |
|
41 | 41 | integration_type_registry.register_integration_type( |
|
42 | 42 | email.EmailIntegrationType) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | # dummy EE integration to show users what we have in EE edition |
|
46 | 46 | integration_type_registry.register_integration_type( |
|
47 | 47 | base.EEIntegration('Jira Issues integration', 'jira')) |
|
48 | 48 | integration_type_registry.register_integration_type( |
|
49 | 49 | base.EEIntegration('Redmine Tracker integration', 'redmine')) |
|
50 | 50 | integration_type_registry.register_integration_type( |
|
51 | 51 | base.EEIntegration('Jenkins CI integration', 'jenkins')) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | def integrations_event_handler(event): |
|
55 | 55 | """ |
|
56 | 56 | Takes an event and passes it to all enabled integrations |
|
57 | 57 | """ |
|
58 | 58 | from rhodecode.model.integration import IntegrationModel |
|
59 | 59 | |
|
60 | 60 | integration_model = IntegrationModel() |
|
61 | 61 | integrations = integration_model.get_for_event(event) |
|
62 | 62 | for integration in integrations: |
|
63 | 63 | try: |
|
64 | 64 | integration_model.send_event(integration, event) |
|
65 | 65 | except Exception: |
|
66 | 66 | exc_info = sys.exc_info() |
|
67 | 67 | store_exception(id(exc_info), exc_info) |
|
68 | 68 | log.exception( |
|
69 |
'failure occurred when sending event %s to integration %s' |
|
|
70 |
|
|
|
69 | 'failure occurred when sending event %s to integration %s', | |
|
70 | event, integration) | |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | def includeme(config): |
|
74 | 74 | config.include('rhodecode.integrations.routes') |
@@ -1,38 +1,37 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software: you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 6 | # (only), as published by the Free Software Foundation. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | # |
|
16 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 | 20 | import logging |
|
21 | 21 | import collections |
|
22 | 22 | |
|
23 | 23 | log = logging.getLogger(__name__) |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | class IntegrationTypeRegistry(collections.OrderedDict): |
|
27 | 27 | """ |
|
28 | 28 | Registry Class to hold IntegrationTypes |
|
29 | 29 | """ |
|
30 | 30 | def register_integration_type(self, IntegrationType): |
|
31 | 31 | key = IntegrationType.key |
|
32 | 32 | if key in self: |
|
33 | 33 | log.debug( |
|
34 |
'Overriding existing integration type %s (%s) with %s' |
|
|
35 |
|
|
|
34 | 'Overriding existing integration type %s (%s) with %s', | |
|
35 | self[key], key, IntegrationType) | |
|
36 | 36 | |
|
37 | 37 | self[key] = IntegrationType |
|
38 |
@@ -1,253 +1,253 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import deform |
|
23 | 23 | import logging |
|
24 | 24 | import requests |
|
25 | 25 | import colander |
|
26 | 26 | import textwrap |
|
27 | 27 | from mako.template import Template |
|
28 | 28 | from rhodecode import events |
|
29 | 29 | from rhodecode.translation import _ |
|
30 | 30 | from rhodecode.lib import helpers as h |
|
31 | 31 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
32 | 32 | from rhodecode.lib.colander_utils import strip_whitespace |
|
33 | 33 | from rhodecode.integrations.types.base import ( |
|
34 | 34 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback) |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class HipchatSettingsSchema(colander.Schema): |
|
40 | 40 | color_choices = [ |
|
41 | 41 | ('yellow', _('Yellow')), |
|
42 | 42 | ('red', _('Red')), |
|
43 | 43 | ('green', _('Green')), |
|
44 | 44 | ('purple', _('Purple')), |
|
45 | 45 | ('gray', _('Gray')), |
|
46 | 46 | ] |
|
47 | 47 | |
|
48 | 48 | server_url = colander.SchemaNode( |
|
49 | 49 | colander.String(), |
|
50 | 50 | title=_('Hipchat server URL'), |
|
51 | 51 | description=_('Hipchat integration url.'), |
|
52 | 52 | default='', |
|
53 | 53 | preparer=strip_whitespace, |
|
54 | 54 | validator=colander.url, |
|
55 | 55 | widget=deform.widget.TextInputWidget( |
|
56 | 56 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', |
|
57 | 57 | ), |
|
58 | 58 | ) |
|
59 | 59 | notify = colander.SchemaNode( |
|
60 | 60 | colander.Bool(), |
|
61 | 61 | title=_('Notify'), |
|
62 | 62 | description=_('Make a notification to the users in room.'), |
|
63 | 63 | missing=False, |
|
64 | 64 | default=False, |
|
65 | 65 | ) |
|
66 | 66 | color = colander.SchemaNode( |
|
67 | 67 | colander.String(), |
|
68 | 68 | title=_('Color'), |
|
69 | 69 | description=_('Background color of message.'), |
|
70 | 70 | missing='', |
|
71 | 71 | validator=colander.OneOf([x[0] for x in color_choices]), |
|
72 | 72 | widget=deform.widget.Select2Widget( |
|
73 | 73 | values=color_choices, |
|
74 | 74 | ), |
|
75 | 75 | ) |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | repo_push_template = Template(''' |
|
79 | 79 | <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>: |
|
80 | 80 | <br> |
|
81 | 81 | <ul> |
|
82 | 82 | %for branch, branch_commits in branches_commits.items(): |
|
83 | 83 | <li> |
|
84 | 84 | % if branch: |
|
85 | 85 | <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a> |
|
86 | 86 | % else: |
|
87 | 87 | to trunk |
|
88 | 88 | % endif |
|
89 | 89 | <ul> |
|
90 | 90 | % for commit in branch_commits['commits']: |
|
91 | 91 | <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li> |
|
92 | 92 | % endfor |
|
93 | 93 | </ul> |
|
94 | 94 | </li> |
|
95 | 95 | %endfor |
|
96 | 96 | ''') |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | class HipchatIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): |
|
100 | 100 | key = 'hipchat' |
|
101 | 101 | display_name = _('Hipchat') |
|
102 | 102 | description = _('Send events such as repo pushes and pull requests to ' |
|
103 | 103 | 'your hipchat channel.') |
|
104 | 104 | |
|
105 | 105 | @classmethod |
|
106 | 106 | def icon(cls): |
|
107 | 107 | return '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' |
|
108 | 108 | |
|
109 | 109 | valid_events = [ |
|
110 | 110 | events.PullRequestCloseEvent, |
|
111 | 111 | events.PullRequestMergeEvent, |
|
112 | 112 | events.PullRequestUpdateEvent, |
|
113 | 113 | events.PullRequestCommentEvent, |
|
114 | 114 | events.PullRequestReviewEvent, |
|
115 | 115 | events.PullRequestCreateEvent, |
|
116 | 116 | events.RepoPushEvent, |
|
117 | 117 | events.RepoCreateEvent, |
|
118 | 118 | ] |
|
119 | 119 | |
|
120 | 120 | def send_event(self, event): |
|
121 | 121 | if event.__class__ not in self.valid_events: |
|
122 |
log.debug('event not valid: %r' |
|
|
122 | log.debug('event not valid: %r', event) | |
|
123 | 123 | return |
|
124 | 124 | |
|
125 | 125 | if event.name not in self.settings['events']: |
|
126 |
log.debug('event ignored: %r' |
|
|
126 | log.debug('event ignored: %r', event) | |
|
127 | 127 | return |
|
128 | 128 | |
|
129 | 129 | data = event.as_dict() |
|
130 | 130 | |
|
131 | 131 | text = '<b>%s<b> caused a <b>%s</b> event' % ( |
|
132 | 132 | data['actor']['username'], event.name) |
|
133 | 133 | |
|
134 |
log.debug('handling hipchat event for %s' |
|
|
134 | log.debug('handling hipchat event for %s', event.name) | |
|
135 | 135 | |
|
136 | 136 | if isinstance(event, events.PullRequestCommentEvent): |
|
137 | 137 | text = self.format_pull_request_comment_event(event, data) |
|
138 | 138 | elif isinstance(event, events.PullRequestReviewEvent): |
|
139 | 139 | text = self.format_pull_request_review_event(event, data) |
|
140 | 140 | elif isinstance(event, events.PullRequestEvent): |
|
141 | 141 | text = self.format_pull_request_event(event, data) |
|
142 | 142 | elif isinstance(event, events.RepoPushEvent): |
|
143 | 143 | text = self.format_repo_push_event(data) |
|
144 | 144 | elif isinstance(event, events.RepoCreateEvent): |
|
145 | 145 | text = self.format_repo_create_event(data) |
|
146 | 146 | else: |
|
147 |
log.error('unhandled event type: %r' |
|
|
147 | log.error('unhandled event type: %r', event) | |
|
148 | 148 | |
|
149 | 149 | run_task(post_text_to_hipchat, self.settings, text) |
|
150 | 150 | |
|
151 | 151 | def settings_schema(self): |
|
152 | 152 | schema = HipchatSettingsSchema() |
|
153 | 153 | schema.add(colander.SchemaNode( |
|
154 | 154 | colander.Set(), |
|
155 | 155 | widget=deform.widget.CheckboxChoiceWidget( |
|
156 | 156 | values=sorted( |
|
157 | 157 | [(e.name, e.display_name) for e in self.valid_events] |
|
158 | 158 | ) |
|
159 | 159 | ), |
|
160 | 160 | description="Events activated for this integration", |
|
161 | 161 | name='events' |
|
162 | 162 | )) |
|
163 | 163 | |
|
164 | 164 | return schema |
|
165 | 165 | |
|
166 | 166 | def format_pull_request_comment_event(self, event, data): |
|
167 | 167 | comment_text = data['comment']['text'] |
|
168 | 168 | if len(comment_text) > 200: |
|
169 | 169 | comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format( |
|
170 | 170 | comment_text=h.html_escape(comment_text[:200]), |
|
171 | 171 | comment_url=data['comment']['url'], |
|
172 | 172 | ) |
|
173 | 173 | |
|
174 | 174 | comment_status = '' |
|
175 | 175 | if data['comment']['status']: |
|
176 | 176 | comment_status = '[{}]: '.format(data['comment']['status']) |
|
177 | 177 | |
|
178 | 178 | return (textwrap.dedent( |
|
179 | 179 | ''' |
|
180 | 180 | {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}: |
|
181 | 181 | >>> {comment_status}{comment_text} |
|
182 | 182 | ''').format( |
|
183 | 183 | comment_status=comment_status, |
|
184 | 184 | user=data['actor']['username'], |
|
185 | 185 | number=data['pullrequest']['pull_request_id'], |
|
186 | 186 | pr_url=data['pullrequest']['url'], |
|
187 | 187 | pr_status=data['pullrequest']['status'], |
|
188 | 188 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
189 | 189 | comment_text=h.html_escape(comment_text) |
|
190 | 190 | ) |
|
191 | 191 | ) |
|
192 | 192 | |
|
193 | 193 | def format_pull_request_review_event(self, event, data): |
|
194 | 194 | return (textwrap.dedent( |
|
195 | 195 | ''' |
|
196 | 196 | Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title} |
|
197 | 197 | ''').format( |
|
198 | 198 | user=data['actor']['username'], |
|
199 | 199 | number=data['pullrequest']['pull_request_id'], |
|
200 | 200 | pr_url=data['pullrequest']['url'], |
|
201 | 201 | pr_status=data['pullrequest']['status'], |
|
202 | 202 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
203 | 203 | ) |
|
204 | 204 | ) |
|
205 | 205 | |
|
206 | 206 | def format_pull_request_event(self, event, data): |
|
207 | 207 | action = { |
|
208 | 208 | events.PullRequestCloseEvent: 'closed', |
|
209 | 209 | events.PullRequestMergeEvent: 'merged', |
|
210 | 210 | events.PullRequestUpdateEvent: 'updated', |
|
211 | 211 | events.PullRequestCreateEvent: 'created', |
|
212 | 212 | }.get(event.__class__, str(event.__class__)) |
|
213 | 213 | |
|
214 | 214 | return ('Pull request <a href="{url}">#{number}</a> - {title} ' |
|
215 | 215 | '{action} by <b>{user}</b>').format( |
|
216 | 216 | user=data['actor']['username'], |
|
217 | 217 | number=data['pullrequest']['pull_request_id'], |
|
218 | 218 | url=data['pullrequest']['url'], |
|
219 | 219 | title=h.html_escape(data['pullrequest']['title']), |
|
220 | 220 | action=action |
|
221 | 221 | ) |
|
222 | 222 | |
|
223 | 223 | def format_repo_push_event(self, data): |
|
224 | 224 | branches_commits = self.aggregate_branch_data( |
|
225 | 225 | data['push']['branches'], data['push']['commits']) |
|
226 | 226 | |
|
227 | 227 | result = render_with_traceback( |
|
228 | 228 | repo_push_template, |
|
229 | 229 | data=data, |
|
230 | 230 | branches_commits=branches_commits, |
|
231 | 231 | ) |
|
232 | 232 | return result |
|
233 | 233 | |
|
234 | 234 | def format_repo_create_event(self, data): |
|
235 | 235 | return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format( |
|
236 | 236 | data['repo']['url'], |
|
237 | 237 | h.html_escape(data['repo']['repo_name']), |
|
238 | 238 | data['repo']['repo_type'], |
|
239 | 239 | data['actor']['username'], |
|
240 | 240 | ) |
|
241 | 241 | |
|
242 | 242 | |
|
243 | 243 | @async_task(ignore_result=True, base=RequestContextTask) |
|
244 | 244 | def post_text_to_hipchat(settings, text): |
|
245 |
log.debug('sending %s to hipchat %s' |
|
|
245 | log.debug('sending %s to hipchat %s', text, settings['server_url']) | |
|
246 | 246 | json_message = { |
|
247 | 247 | "message": text, |
|
248 | 248 | "color": settings.get('color', 'yellow'), |
|
249 | 249 | "notify": settings.get('notify', False), |
|
250 | 250 | } |
|
251 | 251 | |
|
252 | 252 | resp = requests.post(settings['server_url'], json=json_message, timeout=60) |
|
253 | 253 | resp.raise_for_status() # raise exception on a failed request |
@@ -1,350 +1,349 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import re |
|
23 | 23 | import time |
|
24 | 24 | import textwrap |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | import deform |
|
28 | 28 | import requests |
|
29 | 29 | import colander |
|
30 | 30 | from mako.template import Template |
|
31 | 31 | |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.translation import _ |
|
34 | 34 | from rhodecode.lib import helpers as h |
|
35 | 35 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
36 | 36 | from rhodecode.lib.colander_utils import strip_whitespace |
|
37 | 37 | from rhodecode.integrations.types.base import ( |
|
38 | 38 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback) |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class SlackSettingsSchema(colander.Schema): |
|
44 | 44 | service = colander.SchemaNode( |
|
45 | 45 | colander.String(), |
|
46 | 46 | title=_('Slack service URL'), |
|
47 | 47 | description=h.literal(_( |
|
48 | 48 | 'This can be setup at the ' |
|
49 | 49 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' |
|
50 | 50 | 'slack app manager</a>')), |
|
51 | 51 | default='', |
|
52 | 52 | preparer=strip_whitespace, |
|
53 | 53 | validator=colander.url, |
|
54 | 54 | widget=deform.widget.TextInputWidget( |
|
55 | 55 | placeholder='https://hooks.slack.com/services/...', |
|
56 | 56 | ), |
|
57 | 57 | ) |
|
58 | 58 | username = colander.SchemaNode( |
|
59 | 59 | colander.String(), |
|
60 | 60 | title=_('Username'), |
|
61 | 61 | description=_('Username to show notifications coming from.'), |
|
62 | 62 | missing='Rhodecode', |
|
63 | 63 | preparer=strip_whitespace, |
|
64 | 64 | widget=deform.widget.TextInputWidget( |
|
65 | 65 | placeholder='Rhodecode' |
|
66 | 66 | ), |
|
67 | 67 | ) |
|
68 | 68 | channel = colander.SchemaNode( |
|
69 | 69 | colander.String(), |
|
70 | 70 | title=_('Channel'), |
|
71 | 71 | description=_('Channel to send notifications to.'), |
|
72 | 72 | missing='', |
|
73 | 73 | preparer=strip_whitespace, |
|
74 | 74 | widget=deform.widget.TextInputWidget( |
|
75 | 75 | placeholder='#general' |
|
76 | 76 | ), |
|
77 | 77 | ) |
|
78 | 78 | icon_emoji = colander.SchemaNode( |
|
79 | 79 | colander.String(), |
|
80 | 80 | title=_('Emoji'), |
|
81 | 81 | description=_('Emoji to use eg. :studio_microphone:'), |
|
82 | 82 | missing='', |
|
83 | 83 | preparer=strip_whitespace, |
|
84 | 84 | widget=deform.widget.TextInputWidget( |
|
85 | 85 | placeholder=':studio_microphone:' |
|
86 | 86 | ), |
|
87 | 87 | ) |
|
88 | 88 | |
|
89 | 89 | |
|
90 | 90 | class SlackIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): |
|
91 | 91 | key = 'slack' |
|
92 | 92 | display_name = _('Slack') |
|
93 | 93 | description = _('Send events such as repo pushes and pull requests to ' |
|
94 | 94 | 'your slack channel.') |
|
95 | 95 | |
|
96 | 96 | @classmethod |
|
97 | 97 | def icon(cls): |
|
98 | 98 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 
225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' |
|
99 | 99 | |
|
100 | 100 | valid_events = [ |
|
101 | 101 | events.PullRequestCloseEvent, |
|
102 | 102 | events.PullRequestMergeEvent, |
|
103 | 103 | events.PullRequestUpdateEvent, |
|
104 | 104 | events.PullRequestCommentEvent, |
|
105 | 105 | events.PullRequestReviewEvent, |
|
106 | 106 | events.PullRequestCreateEvent, |
|
107 | 107 | events.RepoPushEvent, |
|
108 | 108 | events.RepoCreateEvent, |
|
109 | 109 | ] |
|
110 | 110 | |
|
111 | 111 | def send_event(self, event): |
|
112 | 112 | if event.__class__ not in self.valid_events: |
|
113 |
log.debug('event not valid: %r' |
|
|
113 | log.debug('event not valid: %r', event) | |
|
114 | 114 | return |
|
115 | 115 | |
|
116 | 116 | if event.name not in self.settings['events']: |
|
117 |
log.debug('event ignored: %r' |
|
|
117 | log.debug('event ignored: %r', event) | |
|
118 | 118 | return |
|
119 | 119 | |
|
120 | 120 | data = event.as_dict() |
|
121 | 121 | |
|
122 | 122 | # defaults |
|
123 | 123 | title = '*%s* caused a *%s* event' % ( |
|
124 | 124 | data['actor']['username'], event.name) |
|
125 | 125 | text = '*%s* caused a *%s* event' % ( |
|
126 | 126 | data['actor']['username'], event.name) |
|
127 | 127 | fields = None |
|
128 | 128 | overrides = None |
|
129 | 129 | |
|
130 |
log.debug('handling slack event for %s' |
|
|
130 | log.debug('handling slack event for %s', event.name) | |
|
131 | 131 | |
|
132 | 132 | if isinstance(event, events.PullRequestCommentEvent): |
|
133 | 133 | (title, text, fields, overrides) \ |
|
134 | 134 | = self.format_pull_request_comment_event(event, data) |
|
135 | 135 | elif isinstance(event, events.PullRequestReviewEvent): |
|
136 | 136 | title, text = self.format_pull_request_review_event(event, data) |
|
137 | 137 | elif isinstance(event, events.PullRequestEvent): |
|
138 | 138 | title, text = self.format_pull_request_event(event, data) |
|
139 | 139 | elif isinstance(event, events.RepoPushEvent): |
|
140 | 140 | title, text = self.format_repo_push_event(data) |
|
141 | 141 | elif isinstance(event, events.RepoCreateEvent): |
|
142 | 142 | title, text = self.format_repo_create_event(data) |
|
143 | 143 | else: |
|
144 |
log.error('unhandled event type: %r' |
|
|
144 | log.error('unhandled event type: %r', event) | |
|
145 | 145 | |
|
146 | 146 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) |
|
147 | 147 | |
|
148 | 148 | def settings_schema(self): |
|
149 | 149 | schema = SlackSettingsSchema() |
|
150 | 150 | schema.add(colander.SchemaNode( |
|
151 | 151 | colander.Set(), |
|
152 | 152 | widget=deform.widget.CheckboxChoiceWidget( |
|
153 | 153 | values=sorted( |
|
154 | 154 | [(e.name, e.display_name) for e in self.valid_events] |
|
155 | 155 | ) |
|
156 | 156 | ), |
|
157 | 157 | description="Events activated for this integration", |
|
158 | 158 | name='events' |
|
159 | 159 | )) |
|
160 | 160 | |
|
161 | 161 | return schema |
|
162 | 162 | |
|
163 | 163 | def format_pull_request_comment_event(self, event, data): |
|
164 | 164 | comment_text = data['comment']['text'] |
|
165 | 165 | if len(comment_text) > 200: |
|
166 | 166 | comment_text = '<{comment_url}|{comment_text}...>'.format( |
|
167 | 167 | comment_text=comment_text[:200], |
|
168 | 168 | comment_url=data['comment']['url'], |
|
169 | 169 | ) |
|
170 | 170 | |
|
171 | 171 | fields = None |
|
172 | 172 | overrides = None |
|
173 | 173 | status_text = None |
|
174 | 174 | |
|
175 | 175 | if data['comment']['status']: |
|
176 | 176 | status_color = { |
|
177 | 177 | 'approved': '#0ac878', |
|
178 | 178 | 'rejected': '#e85e4d'}.get(data['comment']['status']) |
|
179 | 179 | |
|
180 | 180 | if status_color: |
|
181 | 181 | overrides = {"color": status_color} |
|
182 | 182 | |
|
183 | 183 | status_text = data['comment']['status'] |
|
184 | 184 | |
|
185 | 185 | if data['comment']['file']: |
|
186 | 186 | fields = [ |
|
187 | 187 | { |
|
188 | 188 | "title": "file", |
|
189 | 189 | "value": data['comment']['file'] |
|
190 | 190 | }, |
|
191 | 191 | { |
|
192 | 192 | "title": "line", |
|
193 | 193 | "value": data['comment']['line'] |
|
194 | 194 | } |
|
195 | 195 | ] |
|
196 | 196 | |
|
197 | 197 | template = Template(textwrap.dedent(r''' |
|
198 | 198 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
199 | 199 | ''')) |
|
200 | 200 | title = render_with_traceback( |
|
201 | 201 | template, data=data, comment=event.comment) |
|
202 | 202 | |
|
203 | 203 | template = Template(textwrap.dedent(r''' |
|
204 | 204 | *pull request title*: ${pr_title} |
|
205 | 205 | % if status_text: |
|
206 | 206 | *submitted status*: `${status_text}` |
|
207 | 207 | % endif |
|
208 | 208 | >>> ${comment_text} |
|
209 | 209 | ''')) |
|
210 | 210 | text = render_with_traceback( |
|
211 | 211 | template, |
|
212 | 212 | comment_text=comment_text, |
|
213 | 213 | pr_title=data['pullrequest']['title'], |
|
214 | 214 | status_text=status_text) |
|
215 | 215 | |
|
216 | 216 | return title, text, fields, overrides |
|
217 | 217 | |
|
218 | 218 | def format_pull_request_review_event(self, event, data): |
|
219 | 219 | template = Template(textwrap.dedent(r''' |
|
220 | 220 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: |
|
221 | 221 | ''')) |
|
222 | 222 | title = render_with_traceback(template, data=data) |
|
223 | 223 | |
|
224 | 224 | template = Template(textwrap.dedent(r''' |
|
225 | 225 | *pull request title*: ${pr_title} |
|
226 | 226 | ''')) |
|
227 | 227 | text = render_with_traceback( |
|
228 | 228 | template, |
|
229 | 229 | pr_title=data['pullrequest']['title']) |
|
230 | 230 | |
|
231 | 231 | return title, text |
|
232 | 232 | |
|
233 | 233 | def format_pull_request_event(self, event, data): |
|
234 | 234 | action = { |
|
235 | 235 | events.PullRequestCloseEvent: 'closed', |
|
236 | 236 | events.PullRequestMergeEvent: 'merged', |
|
237 | 237 | events.PullRequestUpdateEvent: 'updated', |
|
238 | 238 | events.PullRequestCreateEvent: 'created', |
|
239 | 239 | }.get(event.__class__, str(event.__class__)) |
|
240 | 240 | |
|
241 | 241 | template = Template(textwrap.dedent(r''' |
|
242 | 242 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
243 | 243 | ''')) |
|
244 | 244 | title = render_with_traceback(template, data=data, action=action) |
|
245 | 245 | |
|
246 | 246 | template = Template(textwrap.dedent(r''' |
|
247 | 247 | *pull request title*: ${pr_title} |
|
248 | 248 | %if data['pullrequest']['commits']: |
|
249 | 249 | *commits*: ${len(data['pullrequest']['commits'])} |
|
250 | 250 | %endif |
|
251 | 251 | ''')) |
|
252 | 252 | text = render_with_traceback( |
|
253 | 253 | template, |
|
254 | 254 | pr_title=data['pullrequest']['title'], |
|
255 | 255 | data=data) |
|
256 | 256 | |
|
257 | 257 | return title, text |
|
258 | 258 | |
|
259 | 259 | def format_repo_push_event(self, data): |
|
260 | 260 | |
|
261 | 261 | branches_commits = self.aggregate_branch_data( |
|
262 | 262 | data['push']['branches'], data['push']['commits']) |
|
263 | 263 | |
|
264 | 264 | template = Template(r''' |
|
265 | 265 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: |
|
266 | 266 | ''') |
|
267 | 267 | title = render_with_traceback(template, data=data) |
|
268 | 268 | |
|
269 | 269 | repo_push_template = Template(textwrap.dedent(r''' |
|
270 | 270 | <% |
|
271 | 271 | def branch_text(branch): |
|
272 | 272 | if branch: |
|
273 | 273 | return 'on branch: <{}|{}>'.format(branch_commits['branch']['url'], branch_commits['branch']['name']) |
|
274 | 274 | else: |
|
275 | 275 | ## case for SVN no branch push... |
|
276 | 276 | return 'to trunk' |
|
277 | 277 | %> \ |
|
278 | 278 | % for branch, branch_commits in branches_commits.items(): |
|
279 | 279 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} ${branch_text(branch)} |
|
280 | 280 | % for commit in branch_commits['commits']: |
|
281 | 281 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} |
|
282 | 282 | % endfor |
|
283 | 283 | % endfor |
|
284 | 284 | ''')) |
|
285 | 285 | |
|
286 | 286 | text = render_with_traceback( |
|
287 | 287 | repo_push_template, |
|
288 | 288 | data=data, |
|
289 | 289 | branches_commits=branches_commits, |
|
290 | 290 | html_to_slack_links=html_to_slack_links, |
|
291 | 291 | ) |
|
292 | 292 | |
|
293 | 293 | return title, text |
|
294 | 294 | |
|
295 | 295 | def format_repo_create_event(self, data): |
|
296 | 296 | template = Template(r''' |
|
297 | 297 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: |
|
298 | 298 | ''') |
|
299 | 299 | title = render_with_traceback(template, data=data) |
|
300 | 300 | |
|
301 | 301 | template = Template(textwrap.dedent(r''' |
|
302 | 302 | repo_url: ${data['repo']['url']} |
|
303 | 303 | repo_type: ${data['repo']['repo_type']} |
|
304 | 304 | ''')) |
|
305 | 305 | text = render_with_traceback(template, data=data) |
|
306 | 306 | |
|
307 | 307 | return title, text |
|
308 | 308 | |
|
309 | 309 | |
|
310 | 310 | def html_to_slack_links(message): |
|
311 | 311 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( |
|
312 | 312 | r'<\1|\2>', message) |
|
313 | 313 | |
|
314 | 314 | |
|
315 | 315 | @async_task(ignore_result=True, base=RequestContextTask) |
|
316 | 316 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): |
|
317 |
log.debug('sending %s (%s) to slack %s' |
|
|
318 | title, text, settings['service'])) | |
|
317 | log.debug('sending %s (%s) to slack %s', title, text, settings['service']) | |
|
319 | 318 | |
|
320 | 319 | fields = fields or [] |
|
321 | 320 | overrides = overrides or {} |
|
322 | 321 | |
|
323 | 322 | message_data = { |
|
324 | 323 | "fallback": text, |
|
325 | 324 | "color": "#427cc9", |
|
326 | 325 | "pretext": title, |
|
327 | 326 | #"author_name": "Bobby Tables", |
|
328 | 327 | #"author_link": "http://flickr.com/bobby/", |
|
329 | 328 | #"author_icon": "http://flickr.com/icons/bobby.jpg", |
|
330 | 329 | #"title": "Slack API Documentation", |
|
331 | 330 | #"title_link": "https://api.slack.com/", |
|
332 | 331 | "text": text, |
|
333 | 332 | "fields": fields, |
|
334 | 333 | #"image_url": "http://my-website.com/path/to/image.jpg", |
|
335 | 334 | #"thumb_url": "http://example.com/path/to/thumb.png", |
|
336 | 335 | "footer": "RhodeCode", |
|
337 | 336 | #"footer_icon": "", |
|
338 | 337 | "ts": time.time(), |
|
339 | 338 | "mrkdwn_in": ["pretext", "text"] |
|
340 | 339 | } |
|
341 | 340 | message_data.update(overrides) |
|
342 | 341 | json_message = { |
|
343 | 342 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), |
|
344 | 343 | "channel": settings.get('channel', ''), |
|
345 | 344 | "username": settings.get('username', 'Rhodecode'), |
|
346 | 345 | "attachments": [message_data] |
|
347 | 346 | } |
|
348 | 347 | |
|
349 | 348 | resp = requests.post(settings['service'], json=json_message, timeout=60) |
|
350 | 349 | resp.raise_for_status() # raise exception on a failed request |
@@ -1,274 +1,274 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | |
|
23 | 23 | import deform |
|
24 | 24 | import deform.widget |
|
25 | 25 | import logging |
|
26 | 26 | import requests |
|
27 | 27 | import requests.adapters |
|
28 | 28 | import colander |
|
29 | 29 | from requests.packages.urllib3.util.retry import Retry |
|
30 | 30 | |
|
31 | 31 | import rhodecode |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.translation import _ |
|
34 | 34 | from rhodecode.integrations.types.base import ( |
|
35 | 35 | IntegrationTypeBase, get_auth, get_web_token, get_url_vars, |
|
36 | 36 | WebhookDataHandler, WEBHOOK_URL_VARS) |
|
37 | 37 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
38 | 38 | from rhodecode.model.validation_schema import widgets |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | # updating this required to update the `common_vars` passed in url calling func |
|
44 | 44 | |
|
45 | 45 | URL_VARS = get_url_vars(WEBHOOK_URL_VARS) |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | class WebhookSettingsSchema(colander.Schema): |
|
49 | 49 | url = colander.SchemaNode( |
|
50 | 50 | colander.String(), |
|
51 | 51 | title=_('Webhook URL'), |
|
52 | 52 | description= |
|
53 | 53 | _('URL to which Webhook should submit data. If used some of the ' |
|
54 | 54 | 'variables would trigger multiple calls, like ${branch} or ' |
|
55 | 55 | '${commit_id}. Webhook will be called as many times as unique ' |
|
56 | 56 | 'objects in data in such cases.'), |
|
57 | 57 | missing=colander.required, |
|
58 | 58 | required=True, |
|
59 | 59 | validator=colander.url, |
|
60 | 60 | widget=widgets.CodeMirrorWidget( |
|
61 | 61 | help_block_collapsable_name='Show url variables', |
|
62 | 62 | help_block_collapsable=( |
|
63 | 63 | 'E.g http://my-serv/trigger_job/${{event_name}}' |
|
64 | 64 | '?PR_ID=${{pull_request_id}}' |
|
65 | 65 | '\nFull list of vars:\n{}'.format(URL_VARS)), |
|
66 | 66 | codemirror_mode='text', |
|
67 | 67 | codemirror_options='{"lineNumbers": false, "lineWrapping": true}'), |
|
68 | 68 | ) |
|
69 | 69 | secret_token = colander.SchemaNode( |
|
70 | 70 | colander.String(), |
|
71 | 71 | title=_('Secret Token'), |
|
72 | 72 | description=_('Optional string used to validate received payloads. ' |
|
73 | 73 | 'It will be sent together with event data in JSON'), |
|
74 | 74 | default='', |
|
75 | 75 | missing='', |
|
76 | 76 | widget=deform.widget.TextInputWidget( |
|
77 | 77 | placeholder='e.g. secret_token' |
|
78 | 78 | ), |
|
79 | 79 | ) |
|
80 | 80 | username = colander.SchemaNode( |
|
81 | 81 | colander.String(), |
|
82 | 82 | title=_('Username'), |
|
83 | 83 | description=_('Optional username to authenticate the call.'), |
|
84 | 84 | default='', |
|
85 | 85 | missing='', |
|
86 | 86 | widget=deform.widget.TextInputWidget( |
|
87 | 87 | placeholder='e.g. admin' |
|
88 | 88 | ), |
|
89 | 89 | ) |
|
90 | 90 | password = colander.SchemaNode( |
|
91 | 91 | colander.String(), |
|
92 | 92 | title=_('Password'), |
|
93 | 93 | description=_('Optional password to authenticate the call.'), |
|
94 | 94 | default='', |
|
95 | 95 | missing='', |
|
96 | 96 | widget=deform.widget.PasswordWidget( |
|
97 | 97 | placeholder='e.g. secret.', |
|
98 | 98 | redisplay=True, |
|
99 | 99 | ), |
|
100 | 100 | ) |
|
101 | 101 | custom_header_key = colander.SchemaNode( |
|
102 | 102 | colander.String(), |
|
103 | 103 | title=_('Custom Header Key'), |
|
104 | 104 | description=_('Custom Header name to be set when calling endpoint.'), |
|
105 | 105 | default='', |
|
106 | 106 | missing='', |
|
107 | 107 | widget=deform.widget.TextInputWidget( |
|
108 | 108 | placeholder='e.g: Authorization' |
|
109 | 109 | ), |
|
110 | 110 | ) |
|
111 | 111 | custom_header_val = colander.SchemaNode( |
|
112 | 112 | colander.String(), |
|
113 | 113 | title=_('Custom Header Value'), |
|
114 | 114 | description=_('Custom Header value to be set when calling endpoint.'), |
|
115 | 115 | default='', |
|
116 | 116 | missing='', |
|
117 | 117 | widget=deform.widget.TextInputWidget( |
|
118 | 118 | placeholder='e.g. Basic XxXxXx' |
|
119 | 119 | ), |
|
120 | 120 | ) |
|
121 | 121 | method_type = colander.SchemaNode( |
|
122 | 122 | colander.String(), |
|
123 | 123 | title=_('Call Method'), |
|
124 | 124 | description=_('Select if the Webhook call should be made ' |
|
125 | 125 | 'with POST or GET.'), |
|
126 | 126 | default='post', |
|
127 | 127 | missing='', |
|
128 | 128 | widget=deform.widget.RadioChoiceWidget( |
|
129 | 129 | values=[('get', 'GET'), ('post', 'POST')], |
|
130 | 130 | inline=True |
|
131 | 131 | ), |
|
132 | 132 | ) |
|
133 | 133 | |
|
134 | 134 | |
|
135 | 135 | class WebhookIntegrationType(IntegrationTypeBase): |
|
136 | 136 | key = 'webhook' |
|
137 | 137 | display_name = _('Webhook') |
|
138 | 138 | description = _('send JSON data to a url endpoint') |
|
139 | 139 | |
|
140 | 140 | @classmethod |
|
141 | 141 | def icon(cls): |
|
142 | 142 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 
136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
143 | 143 | |
|
144 | 144 | valid_events = [ |
|
145 | 145 | events.PullRequestCloseEvent, |
|
146 | 146 | events.PullRequestMergeEvent, |
|
147 | 147 | events.PullRequestUpdateEvent, |
|
148 | 148 | events.PullRequestCommentEvent, |
|
149 | 149 | events.PullRequestReviewEvent, |
|
150 | 150 | events.PullRequestCreateEvent, |
|
151 | 151 | events.RepoPushEvent, |
|
152 | 152 | events.RepoCreateEvent, |
|
153 | 153 | ] |
|
154 | 154 | |
|
155 | 155 | def settings_schema(self): |
|
156 | 156 | schema = WebhookSettingsSchema() |
|
157 | 157 | schema.add(colander.SchemaNode( |
|
158 | 158 | colander.Set(), |
|
159 | 159 | widget=deform.widget.CheckboxChoiceWidget( |
|
160 | 160 | values=sorted( |
|
161 | 161 | [(e.name, e.display_name) for e in self.valid_events] |
|
162 | 162 | ) |
|
163 | 163 | ), |
|
164 | 164 | description="Events activated for this integration", |
|
165 | 165 | name='events' |
|
166 | 166 | )) |
|
167 | 167 | return schema |
|
168 | 168 | |
|
169 | 169 | def send_event(self, event): |
|
170 | 170 | log.debug( |
|
171 | 171 | 'handling event %s with Webhook integration %s', event.name, self) |
|
172 | 172 | |
|
173 | 173 | if event.__class__ not in self.valid_events: |
|
174 |
log.debug('event not valid: %r' |
|
|
174 | log.debug('event not valid: %r', event) | |
|
175 | 175 | return |
|
176 | 176 | |
|
177 | 177 | if event.name not in self.settings['events']: |
|
178 |
log.debug('event ignored: %r' |
|
|
178 | log.debug('event ignored: %r', event) | |
|
179 | 179 | return |
|
180 | 180 | |
|
181 | 181 | data = event.as_dict() |
|
182 | 182 | template_url = self.settings['url'] |
|
183 | 183 | |
|
184 | 184 | headers = {} |
|
185 | 185 | head_key = self.settings.get('custom_header_key') |
|
186 | 186 | head_val = self.settings.get('custom_header_val') |
|
187 | 187 | if head_key and head_val: |
|
188 | 188 | headers = {head_key: head_val} |
|
189 | 189 | |
|
190 | 190 | handler = WebhookDataHandler(template_url, headers) |
|
191 | 191 | |
|
192 | 192 | url_calls = handler(event, data) |
|
193 | 193 | log.debug('webhook: calling following urls: %s', |
|
194 | 194 | [x[0] for x in url_calls]) |
|
195 | 195 | |
|
196 | 196 | run_task(post_to_webhook, url_calls, self.settings) |
|
197 | 197 | |
|
198 | 198 | |
|
199 | 199 | @async_task(ignore_result=True, base=RequestContextTask) |
|
200 | 200 | def post_to_webhook(url_calls, settings): |
|
201 | 201 | """ |
|
202 | 202 | Example data:: |
|
203 | 203 | |
|
204 | 204 | {'actor': {'user_id': 2, 'username': u'admin'}, |
|
205 | 205 | 'actor_ip': u'192.168.157.1', |
|
206 | 206 | 'name': 'repo-push', |
|
207 | 207 | 'push': {'branches': [{'name': u'default', |
|
208 | 208 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], |
|
209 | 209 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', |
|
210 | 210 | 'branch': u'default', |
|
211 | 211 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), |
|
212 | 212 | 'issues': [], |
|
213 | 213 | 'mentions': [], |
|
214 | 214 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
215 | 215 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
216 | 216 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
217 | 217 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], |
|
218 | 218 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
219 | 219 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
220 | 220 | 'refs': {'bookmarks': [], 'branches': [u'default'], 'tags': [u'tip']}, |
|
221 | 221 | 'reviewers': [], |
|
222 | 222 | 'revision': 9L, |
|
223 | 223 | 'short_id': 'a815cc738b96', |
|
224 | 224 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], |
|
225 | 225 | 'issues': {}}, |
|
226 | 226 | 'repo': {'extra_fields': '', |
|
227 | 227 | 'permalink_url': u'http://rc.local:8080/_7', |
|
228 | 228 | 'repo_id': 7, |
|
229 | 229 | 'repo_name': u'hg-repo', |
|
230 | 230 | 'repo_type': u'hg', |
|
231 | 231 | 'url': u'http://rc.local:8080/hg-repo'}, |
|
232 | 232 | 'server_url': u'http://rc.local:8080', |
|
233 | 233 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) |
|
234 | 234 | |
|
235 | 235 | """ |
|
236 | 236 | max_retries = 3 |
|
237 | 237 | retries = Retry( |
|
238 | 238 | total=max_retries, |
|
239 | 239 | backoff_factor=0.15, |
|
240 | 240 | status_forcelist=[500, 502, 503, 504]) |
|
241 | 241 | call_headers = { |
|
242 | 242 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( |
|
243 | 243 | rhodecode.__version__) |
|
244 | 244 | } # updated below with custom ones, allows override |
|
245 | 245 | |
|
246 | 246 | auth = get_auth(settings) |
|
247 | 247 | token = get_web_token(settings) |
|
248 | 248 | |
|
249 | 249 | for url, headers, data in url_calls: |
|
250 | 250 | req_session = requests.Session() |
|
251 | 251 | req_session.mount( # retry max N times |
|
252 | 252 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) |
|
253 | 253 | |
|
254 | 254 | method = settings.get('method_type') or 'post' |
|
255 | 255 | call_method = getattr(req_session, method) |
|
256 | 256 | |
|
257 | 257 | headers = headers or {} |
|
258 | 258 | call_headers.update(headers) |
|
259 | 259 | |
|
260 | 260 | log.debug('calling Webhook with method: %s, and auth:%s', |
|
261 | 261 | call_method, auth) |
|
262 | 262 | if settings.get('log_data'): |
|
263 | 263 | log.debug('calling webhook with data: %s', data) |
|
264 | 264 | resp = call_method(url, json={ |
|
265 | 265 | 'token': token, |
|
266 | 266 | 'event': data |
|
267 | 267 | }, headers=call_headers, auth=auth, timeout=60) |
|
268 | 268 | log.debug('Got Webhook response: %s', resp) |
|
269 | 269 | |
|
270 | 270 | try: |
|
271 | 271 | resp.raise_for_status() # raise exception on a failed request |
|
272 | 272 | except Exception: |
|
273 | 273 | log.error(resp.text) |
|
274 | 274 | raise |
@@ -1,2342 +1,2338 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | authentication and permission libraries |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import time |
|
27 | 27 | import inspect |
|
28 | 28 | import collections |
|
29 | 29 | import fnmatch |
|
30 | 30 | import hashlib |
|
31 | 31 | import itertools |
|
32 | 32 | import logging |
|
33 | 33 | import random |
|
34 | 34 | import traceback |
|
35 | 35 | from functools import wraps |
|
36 | 36 | |
|
37 | 37 | import ipaddress |
|
38 | 38 | |
|
39 | 39 | from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound |
|
40 | 40 | from sqlalchemy.orm.exc import ObjectDeletedError |
|
41 | 41 | from sqlalchemy.orm import joinedload |
|
42 | 42 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
43 | 43 | |
|
44 | 44 | import rhodecode |
|
45 | 45 | from rhodecode.model import meta |
|
46 | 46 | from rhodecode.model.meta import Session |
|
47 | 47 | from rhodecode.model.user import UserModel |
|
48 | 48 | from rhodecode.model.db import ( |
|
49 | 49 | User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember, |
|
50 | 50 | UserIpMap, UserApiKeys, RepoGroup, UserGroup) |
|
51 | 51 | from rhodecode.lib import rc_cache |
|
52 | 52 | from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5, safe_int, sha1 |
|
53 | 53 | from rhodecode.lib.utils import ( |
|
54 | 54 | get_repo_slug, get_repo_group_slug, get_user_group_slug) |
|
55 | 55 | from rhodecode.lib.caching_query import FromCache |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | if rhodecode.is_unix: |
|
59 | 59 | import bcrypt |
|
60 | 60 | |
|
61 | 61 | log = logging.getLogger(__name__) |
|
62 | 62 | |
|
63 | 63 | csrf_token_key = "csrf_token" |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | class PasswordGenerator(object): |
|
67 | 67 | """ |
|
68 | 68 | This is a simple class for generating password from different sets of |
|
69 | 69 | characters |
|
70 | 70 | usage:: |
|
71 | 71 | |
|
72 | 72 | passwd_gen = PasswordGenerator() |
|
73 | 73 | #print 8-letter password containing only big and small letters |
|
74 | 74 | of alphabet |
|
75 | 75 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
76 | 76 | """ |
|
77 | 77 | ALPHABETS_NUM = r'''1234567890''' |
|
78 | 78 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
79 | 79 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
80 | 80 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
81 | 81 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
82 | 82 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
83 | 83 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
84 | 84 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
85 | 85 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
86 | 86 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
87 | 87 | |
|
88 | 88 | def __init__(self, passwd=''): |
|
89 | 89 | self.passwd = passwd |
|
90 | 90 | |
|
91 | 91 | def gen_password(self, length, type_=None): |
|
92 | 92 | if type_ is None: |
|
93 | 93 | type_ = self.ALPHABETS_FULL |
|
94 | 94 | self.passwd = ''.join([random.choice(type_) for _ in range(length)]) |
|
95 | 95 | return self.passwd |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | class _RhodeCodeCryptoBase(object): |
|
99 | 99 | ENC_PREF = None |
|
100 | 100 | |
|
101 | 101 | def hash_create(self, str_): |
|
102 | 102 | """ |
|
103 | 103 | hash the string using |
|
104 | 104 | |
|
105 | 105 | :param str_: password to hash |
|
106 | 106 | """ |
|
107 | 107 | raise NotImplementedError |
|
108 | 108 | |
|
109 | 109 | def hash_check_with_upgrade(self, password, hashed): |
|
110 | 110 | """ |
|
111 | 111 | Returns tuple in which first element is boolean that states that |
|
112 | 112 | given password matches it's hashed version, and the second is new hash |
|
113 | 113 | of the password, in case this password should be migrated to new |
|
114 | 114 | cipher. |
|
115 | 115 | """ |
|
116 | 116 | checked_hash = self.hash_check(password, hashed) |
|
117 | 117 | return checked_hash, None |
|
118 | 118 | |
|
119 | 119 | def hash_check(self, password, hashed): |
|
120 | 120 | """ |
|
121 | 121 | Checks matching password with it's hashed value. |
|
122 | 122 | |
|
123 | 123 | :param password: password |
|
124 | 124 | :param hashed: password in hashed form |
|
125 | 125 | """ |
|
126 | 126 | raise NotImplementedError |
|
127 | 127 | |
|
128 | 128 | def _assert_bytes(self, value): |
|
129 | 129 | """ |
|
130 | 130 | Passing in an `unicode` object can lead to hard to detect issues |
|
131 | 131 | if passwords contain non-ascii characters. Doing a type check |
|
132 | 132 | during runtime, so that such mistakes are detected early on. |
|
133 | 133 | """ |
|
134 | 134 | if not isinstance(value, str): |
|
135 | 135 | raise TypeError( |
|
136 | 136 | "Bytestring required as input, got %r." % (value, )) |
|
137 | 137 | |
|
138 | 138 | |
|
139 | 139 | class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase): |
|
140 | 140 | ENC_PREF = ('$2a$10', '$2b$10') |
|
141 | 141 | |
|
142 | 142 | def hash_create(self, str_): |
|
143 | 143 | self._assert_bytes(str_) |
|
144 | 144 | return bcrypt.hashpw(str_, bcrypt.gensalt(10)) |
|
145 | 145 | |
|
146 | 146 | def hash_check_with_upgrade(self, password, hashed): |
|
147 | 147 | """ |
|
148 | 148 | Returns tuple in which first element is boolean that states that |
|
149 | 149 | given password matches it's hashed version, and the second is new hash |
|
150 | 150 | of the password, in case this password should be migrated to new |
|
151 | 151 | cipher. |
|
152 | 152 | |
|
153 | 153 | This implements special upgrade logic which works like that: |
|
154 | 154 | - check if the given password == bcrypted hash, if yes then we |
|
155 | 155 | properly used password and it was already in bcrypt. Proceed |
|
156 | 156 | without any changes |
|
157 | 157 | - if bcrypt hash check is not working try with sha256. If hash compare |
|
158 | 158 | is ok, it means we using correct but old hashed password. indicate |
|
159 | 159 | hash change and proceed |
|
160 | 160 | """ |
|
161 | 161 | |
|
162 | 162 | new_hash = None |
|
163 | 163 | |
|
164 | 164 | # regular pw check |
|
165 | 165 | password_match_bcrypt = self.hash_check(password, hashed) |
|
166 | 166 | |
|
167 | 167 | # now we want to know if the password was maybe from sha256 |
|
168 | 168 | # basically calling _RhodeCodeCryptoSha256().hash_check() |
|
169 | 169 | if not password_match_bcrypt: |
|
170 | 170 | if _RhodeCodeCryptoSha256().hash_check(password, hashed): |
|
171 | 171 | new_hash = self.hash_create(password) # make new bcrypt hash |
|
172 | 172 | password_match_bcrypt = True |
|
173 | 173 | |
|
174 | 174 | return password_match_bcrypt, new_hash |
|
175 | 175 | |
|
176 | 176 | def hash_check(self, password, hashed): |
|
177 | 177 | """ |
|
178 | 178 | Checks matching password with it's hashed value. |
|
179 | 179 | |
|
180 | 180 | :param password: password |
|
181 | 181 | :param hashed: password in hashed form |
|
182 | 182 | """ |
|
183 | 183 | self._assert_bytes(password) |
|
184 | 184 | try: |
|
185 | 185 | return bcrypt.hashpw(password, hashed) == hashed |
|
186 | 186 | except ValueError as e: |
|
187 | 187 | # we're having a invalid salt here probably, we should not crash |
|
188 | 188 | # just return with False as it would be a wrong password. |
|
189 | 189 | log.debug('Failed to check password hash using bcrypt %s', |
|
190 | 190 | safe_str(e)) |
|
191 | 191 | |
|
192 | 192 | return False |
|
193 | 193 | |
|
194 | 194 | |
|
195 | 195 | class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase): |
|
196 | 196 | ENC_PREF = '_' |
|
197 | 197 | |
|
198 | 198 | def hash_create(self, str_): |
|
199 | 199 | self._assert_bytes(str_) |
|
200 | 200 | return hashlib.sha256(str_).hexdigest() |
|
201 | 201 | |
|
202 | 202 | def hash_check(self, password, hashed): |
|
203 | 203 | """ |
|
204 | 204 | Checks matching password with it's hashed value. |
|
205 | 205 | |
|
206 | 206 | :param password: password |
|
207 | 207 | :param hashed: password in hashed form |
|
208 | 208 | """ |
|
209 | 209 | self._assert_bytes(password) |
|
210 | 210 | return hashlib.sha256(password).hexdigest() == hashed |
|
211 | 211 | |
|
212 | 212 | |
|
213 | 213 | class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase): |
|
214 | 214 | ENC_PREF = '_' |
|
215 | 215 | |
|
216 | 216 | def hash_create(self, str_): |
|
217 | 217 | self._assert_bytes(str_) |
|
218 | 218 | return sha1(str_) |
|
219 | 219 | |
|
220 | 220 | def hash_check(self, password, hashed): |
|
221 | 221 | """ |
|
222 | 222 | Checks matching password with it's hashed value. |
|
223 | 223 | |
|
224 | 224 | :param password: password |
|
225 | 225 | :param hashed: password in hashed form |
|
226 | 226 | """ |
|
227 | 227 | self._assert_bytes(password) |
|
228 | 228 | return sha1(password) == hashed |
|
229 | 229 | |
|
230 | 230 | |
|
231 | 231 | def crypto_backend(): |
|
232 | 232 | """ |
|
233 | 233 | Return the matching crypto backend. |
|
234 | 234 | |
|
235 | 235 | Selection is based on if we run tests or not, we pick sha1-test backend to run |
|
236 | 236 | tests faster since BCRYPT is expensive to calculate |
|
237 | 237 | """ |
|
238 | 238 | if rhodecode.is_test: |
|
239 | 239 | RhodeCodeCrypto = _RhodeCodeCryptoTest() |
|
240 | 240 | else: |
|
241 | 241 | RhodeCodeCrypto = _RhodeCodeCryptoBCrypt() |
|
242 | 242 | |
|
243 | 243 | return RhodeCodeCrypto |
|
244 | 244 | |
|
245 | 245 | |
|
246 | 246 | def get_crypt_password(password): |
|
247 | 247 | """ |
|
248 | 248 | Create the hash of `password` with the active crypto backend. |
|
249 | 249 | |
|
250 | 250 | :param password: The cleartext password. |
|
251 | 251 | :type password: unicode |
|
252 | 252 | """ |
|
253 | 253 | password = safe_str(password) |
|
254 | 254 | return crypto_backend().hash_create(password) |
|
255 | 255 | |
|
256 | 256 | |
|
257 | 257 | def check_password(password, hashed): |
|
258 | 258 | """ |
|
259 | 259 | Check if the value in `password` matches the hash in `hashed`. |
|
260 | 260 | |
|
261 | 261 | :param password: The cleartext password. |
|
262 | 262 | :type password: unicode |
|
263 | 263 | |
|
264 | 264 | :param hashed: The expected hashed version of the password. |
|
265 | 265 | :type hashed: The hash has to be passed in in text representation. |
|
266 | 266 | """ |
|
267 | 267 | password = safe_str(password) |
|
268 | 268 | return crypto_backend().hash_check(password, hashed) |
|
269 | 269 | |
|
270 | 270 | |
|
271 | 271 | def generate_auth_token(data, salt=None): |
|
272 | 272 | """ |
|
273 | 273 | Generates API KEY from given string |
|
274 | 274 | """ |
|
275 | 275 | |
|
276 | 276 | if salt is None: |
|
277 | 277 | salt = os.urandom(16) |
|
278 | 278 | return hashlib.sha1(safe_str(data) + salt).hexdigest() |
|
279 | 279 | |
|
280 | 280 | |
|
281 | 281 | def get_came_from(request): |
|
282 | 282 | """ |
|
283 | 283 | get query_string+path from request sanitized after removing auth_token |
|
284 | 284 | """ |
|
285 | 285 | _req = request |
|
286 | 286 | |
|
287 | 287 | path = _req.path |
|
288 | 288 | if 'auth_token' in _req.GET: |
|
289 | 289 | # sanitize the request and remove auth_token for redirection |
|
290 | 290 | _req.GET.pop('auth_token') |
|
291 | 291 | qs = _req.query_string |
|
292 | 292 | if qs: |
|
293 | 293 | path += '?' + qs |
|
294 | 294 | |
|
295 | 295 | return path |
|
296 | 296 | |
|
297 | 297 | |
|
298 | 298 | class CookieStoreWrapper(object): |
|
299 | 299 | |
|
300 | 300 | def __init__(self, cookie_store): |
|
301 | 301 | self.cookie_store = cookie_store |
|
302 | 302 | |
|
303 | 303 | def __repr__(self): |
|
304 | 304 | return 'CookieStore<%s>' % (self.cookie_store) |
|
305 | 305 | |
|
306 | 306 | def get(self, key, other=None): |
|
307 | 307 | if isinstance(self.cookie_store, dict): |
|
308 | 308 | return self.cookie_store.get(key, other) |
|
309 | 309 | elif isinstance(self.cookie_store, AuthUser): |
|
310 | 310 | return self.cookie_store.__dict__.get(key, other) |
|
311 | 311 | |
|
312 | 312 | |
|
313 | 313 | def _cached_perms_data(user_id, scope, user_is_admin, |
|
314 | 314 | user_inherit_default_permissions, explicit, algo, |
|
315 | 315 | calculate_super_admin): |
|
316 | 316 | |
|
317 | 317 | permissions = PermissionCalculator( |
|
318 | 318 | user_id, scope, user_is_admin, user_inherit_default_permissions, |
|
319 | 319 | explicit, algo, calculate_super_admin) |
|
320 | 320 | return permissions.calculate() |
|
321 | 321 | |
|
322 | 322 | |
|
323 | 323 | class PermOrigin(object): |
|
324 | 324 | SUPER_ADMIN = 'superadmin' |
|
325 | 325 | |
|
326 | 326 | REPO_USER = 'user:%s' |
|
327 | 327 | REPO_USERGROUP = 'usergroup:%s' |
|
328 | 328 | REPO_OWNER = 'repo.owner' |
|
329 | 329 | REPO_DEFAULT = 'repo.default' |
|
330 | 330 | REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit' |
|
331 | 331 | REPO_PRIVATE = 'repo.private' |
|
332 | 332 | |
|
333 | 333 | REPOGROUP_USER = 'user:%s' |
|
334 | 334 | REPOGROUP_USERGROUP = 'usergroup:%s' |
|
335 | 335 | REPOGROUP_OWNER = 'group.owner' |
|
336 | 336 | REPOGROUP_DEFAULT = 'group.default' |
|
337 | 337 | REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit' |
|
338 | 338 | |
|
339 | 339 | USERGROUP_USER = 'user:%s' |
|
340 | 340 | USERGROUP_USERGROUP = 'usergroup:%s' |
|
341 | 341 | USERGROUP_OWNER = 'usergroup.owner' |
|
342 | 342 | USERGROUP_DEFAULT = 'usergroup.default' |
|
343 | 343 | USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit' |
|
344 | 344 | |
|
345 | 345 | |
|
346 | 346 | class PermOriginDict(dict): |
|
347 | 347 | """ |
|
348 | 348 | A special dict used for tracking permissions along with their origins. |
|
349 | 349 | |
|
350 | 350 | `__setitem__` has been overridden to expect a tuple(perm, origin) |
|
351 | 351 | `__getitem__` will return only the perm |
|
352 | 352 | `.perm_origin_stack` will return the stack of (perm, origin) set per key |
|
353 | 353 | |
|
354 | 354 | >>> perms = PermOriginDict() |
|
355 | 355 | >>> perms['resource'] = 'read', 'default' |
|
356 | 356 | >>> perms['resource'] |
|
357 | 357 | 'read' |
|
358 | 358 | >>> perms['resource'] = 'write', 'admin' |
|
359 | 359 | >>> perms['resource'] |
|
360 | 360 | 'write' |
|
361 | 361 | >>> perms.perm_origin_stack |
|
362 | 362 | {'resource': [('read', 'default'), ('write', 'admin')]} |
|
363 | 363 | """ |
|
364 | 364 | |
|
365 | 365 | def __init__(self, *args, **kw): |
|
366 | 366 | dict.__init__(self, *args, **kw) |
|
367 | 367 | self.perm_origin_stack = collections.OrderedDict() |
|
368 | 368 | |
|
369 | 369 | def __setitem__(self, key, (perm, origin)): |
|
370 | 370 | self.perm_origin_stack.setdefault(key, []).append( |
|
371 | 371 | (perm, origin)) |
|
372 | 372 | dict.__setitem__(self, key, perm) |
|
373 | 373 | |
|
374 | 374 | |
|
class BranchPermOriginDict(PermOriginDict):
    """
    Dedicated branch permissions dict, with tracking of patterns and origins.

    >>> perms = BranchPermOriginDict()
    >>> perms['resource'] = '*pattern', 'read', 'default'
    >>> perms['resource']
    {'*pattern': 'read'}
    >>> perms['resource'] = '*pattern', 'write', 'admin'
    >>> perms['resource']
    {'*pattern': 'write'}
    >>> perms.perm_origin_stack
    {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
    """
    def __setitem__(self, key, pattern_perm_origin):
        # NOTE: the original py2-only tuple-parameter-unpacking signature
        # ``def __setitem__(self, key, (pattern, perm, origin))`` was removed
        # by PEP 3113; unpack explicitly for py3 compatibility. Callers still
        # assign via ``d[key] = pattern, perm, origin``.
        (pattern, perm, origin) = pattern_perm_origin

        # per-key stack is nested one level deeper than the base class:
        # key -> pattern -> [(perm, origin), ...]
        self.perm_origin_stack.setdefault(key, {}) \
            .setdefault(pattern, []).append((perm, origin))

        if key in self:
            self[key].__setitem__(pattern, perm)
        else:
            patterns = collections.OrderedDict()
            patterns[pattern] = perm
            dict.__setitem__(self, key, patterns)
|
400 | 400 | |
|
401 | 401 | |
|
class PermissionCalculator(object):
    """
    Computes the effective permission structure for a single user.

    The calculation aggregates, in order: global defaults, user-group
    global permissions, per-object default permissions, permissions
    inherited via user-group membership, and explicit per-user
    permissions — with owner and super-admin overrides re-applied after
    each pass. The result is the dict produced by
    `_permission_structure()`.
    """

    def __init__(
            self, user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin_as_user=False):
        # :param scope: optional dict narrowing DB queries to a single
        #     repo/repo-group/user-group id (see keys read below)
        # :param explicit: if True, only explicitly set permissions are
        #     used; otherwise conflicts are resolved via `_choose_permission`
        # :param algo: 'higherwin' or 'lowerwin' conflict resolution
        # :param calculate_super_admin_as_user: if True, run the full
        #     calculation even for super-admins instead of the admin shortcut

        self.user_id = user_id
        self.user_is_admin = user_is_admin
        self.inherit_default_permissions = user_inherit_default_permissions
        self.explicit = explicit
        self.algo = algo
        self.calculate_super_admin_as_user = calculate_super_admin_as_user

        scope = scope or {}
        self.scope_repo_id = scope.get('repo_id')
        self.scope_repo_group_id = scope.get('repo_group_id')
        self.scope_user_group_id = scope.get('user_group_id')

        self.default_user_id = User.get_default_user(cache=True).user_id

        # origin-tracking dicts: assignment takes (perm, origin) tuples,
        # lookup returns just the perm
        self.permissions_repositories = PermOriginDict()
        self.permissions_repository_groups = PermOriginDict()
        self.permissions_user_groups = PermOriginDict()
        self.permissions_repository_branches = BranchPermOriginDict()
        self.permissions_global = set()

        # permissions of the `default` user, used as the inheritance base
        self.default_repo_perms = Permission.get_default_repo_perms(
            self.default_user_id, self.scope_repo_id)
        self.default_repo_groups_perms = Permission.get_default_group_perms(
            self.default_user_id, self.scope_repo_group_id)
        self.default_user_group_perms = \
            Permission.get_default_user_group_perms(
                self.default_user_id, self.scope_user_group_id)

        # default branch perms
        self.default_branch_repo_perms = \
            Permission.get_default_repo_branch_perms(
                self.default_user_id, self.scope_repo_id)

    def calculate(self):
        """
        Run the full calculation and return the permission structure.

        Super-admins take a shortcut (`_calculate_admin_permissions`)
        unless `calculate_super_admin_as_user` forces the regular path.
        The order of the `_calculate_*` calls matters: later passes
        override earlier ones.
        """
        if self.user_is_admin and not self.calculate_super_admin_as_user:
            return self._calculate_admin_permissions()

        self._calculate_global_default_permissions()
        self._calculate_global_permissions()
        self._calculate_default_permissions()
        self._calculate_repository_permissions()
        self._calculate_repository_branch_permissions()
        self._calculate_repository_group_permissions()
        self._calculate_user_group_permissions()
        return self._permission_structure()

    def _calculate_admin_permissions(self):
        """
        admin user have all default rights for repositories
        and groups set to admin
        """
        self.permissions_global.add('hg.admin')
        self.permissions_global.add('hg.create.write_on_repogroup.true')

        # repositories
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            p = 'repository.admin'
            self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN

        # repository groups
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            p = 'group.admin'
            self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN

        # user groups
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            p = 'usergroup.admin'
            self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN

        # branch permissions
        # since super-admin also can have custom rule permissions
        # we *always* need to calculate those inherited from default, and also explicit
        self._calculate_default_permissions_repository_branches(
            user_inherit_object_permissions=False)
        self._calculate_repository_branch_permissions()

        return self._permission_structure()

    def _calculate_global_default_permissions(self):
        """
        global permissions taken from the default user
        """
        default_global_perms = UserToPerm.query()\
            .filter(UserToPerm.user_id == self.default_user_id)\
            .options(joinedload(UserToPerm.permission))

        for perm in default_global_perms:
            self.permissions_global.add(perm.permission.permission_name)

        if self.user_is_admin:
            self.permissions_global.add('hg.admin')
            self.permissions_global.add('hg.create.write_on_repogroup.true')

    def _calculate_global_permissions(self):
        """
        Set global system permissions with user permissions or permissions
        taken from the user groups of the current user.

        The permissions include repo creating, repo group creating, forking
        etc.
        """

        # now we read the defined permissions and overwrite what we have set
        # before those can be configured from groups or users explicitly.

        # In case we want to extend this list we should make sure
        # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
        _configurable = frozenset([
            'hg.fork.none', 'hg.fork.repository',
            'hg.create.none', 'hg.create.repository',
            'hg.usergroup.create.false', 'hg.usergroup.create.true',
            'hg.repogroup.create.false', 'hg.repogroup.create.true',
            'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
            'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
        ])

        # USER GROUPS comes first user group global permissions
        user_perms_from_users_groups = Session().query(UserGroupToPerm)\
            .options(joinedload(UserGroupToPerm.permission))\
            .join((UserGroupMember, UserGroupToPerm.users_group_id ==
                   UserGroupMember.users_group_id))\
            .filter(UserGroupMember.user_id == self.user_id)\
            .order_by(UserGroupToPerm.users_group_id)\
            .all()

        # need to group here by groups since user can be in more than
        # one group, so we get all groups
        _explicit_grouped_perms = [
            [x, list(y)] for x, y in
            itertools.groupby(user_perms_from_users_groups,
                              lambda _x: _x.users_group)]

        for gr, perms in _explicit_grouped_perms:
            # since user can be in multiple groups iterate over them and
            # select the lowest permissions first (more explicit)
            # TODO(marcink): do this^^

            # group doesn't inherit default permissions so we actually set them
            if not gr.inherit_default_permissions:
                # NEED TO IGNORE all previously set configurable permissions
                # and replace them with explicitly set from this user
                # group permissions
                self.permissions_global = self.permissions_global.difference(
                    _configurable)
            for perm in perms:
                self.permissions_global.add(perm.permission.permission_name)

        # user explicit global permissions
        user_perms = Session().query(UserToPerm)\
            .options(joinedload(UserToPerm.permission))\
            .filter(UserToPerm.user_id == self.user_id).all()

        if not self.inherit_default_permissions:
            # NEED TO IGNORE all configurable permissions and
            # replace them with explicitly set from this user permissions
            self.permissions_global = self.permissions_global.difference(
                _configurable)
            for perm in user_perms:
                self.permissions_global.add(perm.permission.permission_name)

    def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
        # Apply the `default` user's repo permissions, then override with
        # no-inherit / private-repo / owner / super-admin rules (in that order).
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_DEFAULT
            self.permissions_repositories[r_k] = p, o

            # if we decide this user isn't inheriting permissions from
            # default user we set him to .none so only explicit
            # permissions work
            if not user_inherit_object_permissions:
                p = 'repository.none'
                o = PermOrigin.REPO_DEFAULT_NO_INHERIT
                self.permissions_repositories[r_k] = p, o

            if perm.Repository.private and not (
                    perm.Repository.user_id == self.user_id):
                # disable defaults for private repos,
                p = 'repository.none'
                o = PermOrigin.REPO_PRIVATE
                self.permissions_repositories[r_k] = p, o

            elif perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o

    def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
        # Apply the `default` user's branch-pattern permissions.
        # NOTE(review): unlike the sibling `_calculate_default_permissions_*`
        # methods, `user_inherit_object_permissions` is unused here — confirm
        # whether branch perms intentionally ignore the no-inherit flag.
        for perm in self.default_branch_repo_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # TODO(marcink): fix this for multiple entries
                cur_perm = self.permissions_repository_branches.get(r_k) or 'branch.none'
                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = pattern, p, o

    def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
        # Apply the `default` user's repo-group permissions, then override
        # with no-inherit / owner / super-admin rules (in that order).
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            p = perm.Permission.permission_name
            o = PermOrigin.REPOGROUP_DEFAULT
            self.permissions_repository_groups[rg_k] = p, o

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'group.none'
                o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
                self.permissions_repository_groups[rg_k] = p, o

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o

    def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
        # Apply the `default` user's user-group permissions, then override
        # with no-inherit / owner / super-admin rules (in that order).
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            p = perm.Permission.permission_name
            o = PermOrigin.USERGROUP_DEFAULT
            self.permissions_user_groups[u_k] = p, o

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'usergroup.none'
                o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
                self.permissions_user_groups[u_k] = p, o

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[u_k] = p, o

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[u_k] = p, o

    def _calculate_default_permissions(self):
        """
        Set default user permissions for repositories, repository branches,
        repository groups, user groups taken from the default user.

        Calculate inheritance of object permissions based on what we have now
        in GLOBAL permissions. We check if .false is in GLOBAL since this is
        explicitly set. Inherit is the opposite of .false being there.

        .. note::

           the syntax is little bit odd but what we need to check here is
           the opposite of .false permission being in the list so even for
           inconsistent state when both .true/.false is there
           .false is more important

        """
        user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
                                               in self.permissions_global)

        # default permissions inherited from `default` user permissions
        self._calculate_default_permissions_repositories(
            user_inherit_object_permissions)

        self._calculate_default_permissions_repository_branches(
            user_inherit_object_permissions)

        self._calculate_default_permissions_repository_groups(
            user_inherit_object_permissions)

        self._calculate_default_permissions_user_groups(
            user_inherit_object_permissions)

    def _calculate_repository_permissions(self):
        """
        Repository permissions for the current user.

        Check if the user is part of user groups for this repository and
        fill in the permission from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """

        # user group for repositories permissions
        user_repo_perms_from_user_group = Permission\
            .get_default_repo_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        # counts how many user groups granted a perm per repo; a repo seen
        # more than once means conflicting grants that need resolution
        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            multiple_counter[r_k] += 1
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            if multiple_counter[r_k] > 1:
                cur_perm = self.permissions_repositories[r_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o

        # user explicit permissions for repositories, overrides any specified
        # by the group permission
        user_repo_perms = Permission.get_default_repo_perms(
            self.user_id, self.scope_repo_id)
        for perm in user_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                cur_perm = self.permissions_repositories.get(
                    r_k, 'repository.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o

    def _calculate_repository_branch_permissions(self):
        """
        Branch-pattern permissions for the current user: user-group grants
        first, then explicit per-user grants which override them.
        """
        # user group for repositories permissions
        user_repo_branch_perms_from_user_group = Permission\
            .get_default_repo_branch_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_branch_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            multiple_counter[r_k] += 1
            if multiple_counter[r_k] > 1:
                # TODO(marcink): fix this for multi branch support, and multiple entries
                # NOTE(review): `cur_perm` here is the pattern->perm dict, not
                # a single perm string — `_choose_permission` indexes
                # PERM_WEIGHTS with it; verify against PERM_WEIGHTS contents
                cur_perm = self.permissions_repository_branches[r_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_branches[r_k] = pattern, p, o

        # user explicit branch permissions for repositories, overrides
        # any specified by the group permission
        user_repo_branch_perms = Permission.get_default_repo_branch_perms(
            self.user_id, self.scope_repo_id)

        for perm in user_repo_branch_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # TODO(marcink): fix this for multiple entries
                cur_perm = self.permissions_repository_branches.get(r_k) or 'branch.none'
                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = pattern, p, o

    def _calculate_repository_group_permissions(self):
        """
        Repository group permissions for the current user.

        Check if the user is part of user groups for repository groups and
        fill in the permissions from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """
        # user group for repo groups permissions
        user_repo_group_perms_from_user_group = Permission\
            .get_default_group_perms_from_user_group(
                self.user_id, self.scope_repo_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_group_perms_from_user_group:
            rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
            multiple_counter[rg_k] += 1
            o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
                .users_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[rg_k] > 1:
                cur_perm = self.permissions_repository_groups[rg_k]
                p = self._choose_permission(p, cur_perm)
            self.permissions_repository_groups[rg_k] = p, o

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o

        # user explicit permissions for repository groups
        user_repo_groups_perms = Permission.get_default_group_perms(
            self.user_id, self.scope_repo_group_id)
        for perm in user_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                cur_perm = self.permissions_repository_groups.get(
                    rg_k, 'group.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_groups[rg_k] = p, o

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o

    def _calculate_user_group_permissions(self):
        """
        User group permissions for the current user.
        """
        # user group for user group permissions
        user_group_from_user_group = Permission\
            .get_default_user_group_perms_from_user_group(
                self.user_id, self.scope_user_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_group_from_user_group:
            ug_k = perm.UserGroupUserGroupToPerm\
                .target_user_group.users_group_name
            multiple_counter[ug_k] += 1
            o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
                .user_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[ug_k] > 1:
                cur_perm = self.permissions_user_groups[ug_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o

        # user explicit permission for user groups
        user_user_groups_perms = Permission.get_default_user_group_perms(
            self.user_id, self.scope_user_group_id)
        for perm in user_user_groups_perms:
            ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
            o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                cur_perm = self.permissions_user_groups.get(
                    ug_k, 'usergroup.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o

    def _choose_permission(self, new_perm, cur_perm):
        # Resolve a conflict between two perms via their numeric weights.
        # NOTE(review): implicitly returns None when `self.algo` is neither
        # 'higherwin' nor 'lowerwin' — confirm the algo value is validated
        # upstream.
        new_perm_val = Permission.PERM_WEIGHTS[new_perm]
        cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
        if self.algo == 'higherwin':
            if new_perm_val > cur_perm_val:
                return new_perm
            return cur_perm
        elif self.algo == 'lowerwin':
            if new_perm_val < cur_perm_val:
                return new_perm
            return cur_perm

    def _permission_structure(self):
        # Final aggregated result returned by `calculate()`.
        return {
            'global': self.permissions_global,
            'repositories': self.permissions_repositories,
            'repository_branches': self.permissions_repository_branches,
            'repositories_groups': self.permissions_repository_groups,
            'user_groups': self.permissions_user_groups,
        }
|
960 | 960 | |
|
961 | 961 | |
|
def allowed_auth_token_access(view_name, auth_token, whitelist=None):
    """
    Check if given controller_name is in whitelist of auth token access
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')

    # backward compat translation
    compat = {
        # old controller, new VIEW
        'ChangesetController:*': 'RepoCommitsView:*',
        'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
        'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
        'GistsController:*': 'GistView:*',
    }

    log.debug(
        'Allowed views for AUTH TOKEN access: %s', whitelist)

    def _entry_matches(raw_entry):
        # translate from old Controllers to Pyramid Views
        pattern = compat.get(raw_entry, raw_entry)

        required_token_ok = True
        if '@' in pattern:
            # entry is bound to one specific AuthToken
            pattern, bound_token = pattern.split('@', 1)
            required_token_ok = auth_token == bound_token

        return required_token_ok and fnmatch.fnmatch(view_name, pattern)

    auth_token_access_valid = any(
        _entry_matches(entry) for entry in whitelist)

    if auth_token_access_valid:
        log.debug('view: `%s` matches entry in whitelist: %s',
                  view_name, whitelist)
    else:
        msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
               % (view_name, whitelist))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return auth_token_access_valid
|
1013 | 1014 | |
|
1014 | 1015 | |
|
1015 | 1016 | class AuthUser(object): |
|
1016 | 1017 | """ |
|
1017 | 1018 | A simple object that handles all attributes of user in RhodeCode |
|
1018 | 1019 | |
|
1019 | 1020 | It does lookup based on API key,given user, or user present in session |
|
1020 | 1021 | Then it fills all required information for such user. It also checks if |
|
1021 | 1022 | anonymous access is enabled and if so, it returns default user as logged in |
|
1022 | 1023 | """ |
|
1023 | 1024 | GLOBAL_PERMS = [x[0] for x in Permission.PERMS] |
|
1024 | 1025 | |
|
1025 | 1026 | def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None): |
|
1026 | 1027 | |
|
1027 | 1028 | self.user_id = user_id |
|
1028 | 1029 | self._api_key = api_key |
|
1029 | 1030 | |
|
1030 | 1031 | self.api_key = None |
|
1031 | 1032 | self.username = username |
|
1032 | 1033 | self.ip_addr = ip_addr |
|
1033 | 1034 | self.name = '' |
|
1034 | 1035 | self.lastname = '' |
|
1035 | 1036 | self.first_name = '' |
|
1036 | 1037 | self.last_name = '' |
|
1037 | 1038 | self.email = '' |
|
1038 | 1039 | self.is_authenticated = False |
|
1039 | 1040 | self.admin = False |
|
1040 | 1041 | self.inherit_default_permissions = False |
|
1041 | 1042 | self.password = '' |
|
1042 | 1043 | |
|
1043 | 1044 | self.anonymous_user = None # propagated on propagate_data |
|
1044 | 1045 | self.propagate_data() |
|
1045 | 1046 | self._instance = None |
|
1046 | 1047 | self._permissions_scoped_cache = {} # used to bind scoped calculation |
|
1047 | 1048 | |
|
1048 | 1049 | @LazyProperty |
|
1049 | 1050 | def permissions(self): |
|
1050 | 1051 | return self.get_perms(user=self, cache=None) |
|
1051 | 1052 | |
|
1052 | 1053 | @LazyProperty |
|
1053 | 1054 | def permissions_safe(self): |
|
1054 | 1055 | """ |
|
1055 | 1056 | Filtered permissions excluding not allowed repositories |
|
1056 | 1057 | """ |
|
1057 | 1058 | perms = self.get_perms(user=self, cache=None) |
|
1058 | 1059 | |
|
1059 | 1060 | perms['repositories'] = { |
|
1060 | 1061 | k: v for k, v in perms['repositories'].items() |
|
1061 | 1062 | if v != 'repository.none'} |
|
1062 | 1063 | perms['repositories_groups'] = { |
|
1063 | 1064 | k: v for k, v in perms['repositories_groups'].items() |
|
1064 | 1065 | if v != 'group.none'} |
|
1065 | 1066 | perms['user_groups'] = { |
|
1066 | 1067 | k: v for k, v in perms['user_groups'].items() |
|
1067 | 1068 | if v != 'usergroup.none'} |
|
1068 | 1069 | perms['repository_branches'] = { |
|
1069 | 1070 | k: v for k, v in perms['repository_branches'].iteritems() |
|
1070 | 1071 | if v != 'branch.none'} |
|
1071 | 1072 | return perms |
|
1072 | 1073 | |
|
1073 | 1074 | @LazyProperty |
|
1074 | 1075 | def permissions_full_details(self): |
|
1075 | 1076 | return self.get_perms( |
|
1076 | 1077 | user=self, cache=None, calculate_super_admin=True) |
|
1077 | 1078 | |
|
1078 | 1079 | def permissions_with_scope(self, scope): |
|
1079 | 1080 | """ |
|
1080 | 1081 | Call the get_perms function with scoped data. The scope in that function |
|
1081 | 1082 | narrows the SQL calls to the given ID of objects resulting in fetching |
|
1082 | 1083 | Just particular permission we want to obtain. If scope is an empty dict |
|
1083 | 1084 | then it basically narrows the scope to GLOBAL permissions only. |
|
1084 | 1085 | |
|
1085 | 1086 | :param scope: dict |
|
1086 | 1087 | """ |
|
1087 | 1088 | if 'repo_name' in scope: |
|
1088 | 1089 | obj = Repository.get_by_repo_name(scope['repo_name']) |
|
1089 | 1090 | if obj: |
|
1090 | 1091 | scope['repo_id'] = obj.repo_id |
|
1091 | 1092 | _scope = collections.OrderedDict() |
|
1092 | 1093 | _scope['repo_id'] = -1 |
|
1093 | 1094 | _scope['user_group_id'] = -1 |
|
1094 | 1095 | _scope['repo_group_id'] = -1 |
|
1095 | 1096 | |
|
1096 | 1097 | for k in sorted(scope.keys()): |
|
1097 | 1098 | _scope[k] = scope[k] |
|
1098 | 1099 | |
|
1099 | 1100 | # store in cache to mimic how the @LazyProperty works, |
|
1100 | 1101 | # the difference here is that we use the unique key calculated |
|
1101 | 1102 | # from params and values |
|
1102 | 1103 | return self.get_perms(user=self, cache=None, scope=_scope) |
|
1103 | 1104 | |
|
1104 | 1105 | def get_instance(self): |
|
1105 | 1106 | return User.get(self.user_id) |
|
1106 | 1107 | |
|
1107 | 1108 | def propagate_data(self): |
|
1108 | 1109 | """ |
|
1109 | 1110 | Fills in user data and propagates values to this instance. Maps fetched |
|
1110 | 1111 | user attributes to this class instance attributes |
|
1111 | 1112 | """ |
|
1112 | 1113 | log.debug('AuthUser: starting data propagation for new potential user') |
|
1113 | 1114 | user_model = UserModel() |
|
1114 | 1115 | anon_user = self.anonymous_user = User.get_default_user(cache=True) |
|
1115 | 1116 | is_user_loaded = False |
|
1116 | 1117 | |
|
1117 | 1118 | # lookup by userid |
|
1118 | 1119 | if self.user_id is not None and self.user_id != anon_user.user_id: |
|
1119 | 1120 | log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id) |
|
1120 | 1121 | is_user_loaded = user_model.fill_data(self, user_id=self.user_id) |
|
1121 | 1122 | |
|
1122 | 1123 | # try go get user by api key |
|
1123 | 1124 | elif self._api_key and self._api_key != anon_user.api_key: |
|
1124 | 1125 | log.debug('Trying Auth User lookup by API KEY: `%s`', self._api_key) |
|
1125 | 1126 | is_user_loaded = user_model.fill_data(self, api_key=self._api_key) |
|
1126 | 1127 | |
|
1127 | 1128 | # lookup by username |
|
1128 | 1129 | elif self.username: |
|
1129 | 1130 | log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username) |
|
1130 | 1131 | is_user_loaded = user_model.fill_data(self, username=self.username) |
|
1131 | 1132 | else: |
|
1132 | 1133 | log.debug('No data in %s that could been used to log in', self) |
|
1133 | 1134 | |
|
1134 | 1135 | if not is_user_loaded: |
|
1135 | 1136 | log.debug( |
|
1136 | 1137 | 'Failed to load user. Fallback to default user %s', anon_user) |
|
1137 | 1138 | # if we cannot authenticate user try anonymous |
|
1138 | 1139 | if anon_user.active: |
|
1139 | 1140 | log.debug('default user is active, using it as a session user') |
|
1140 | 1141 | user_model.fill_data(self, user_id=anon_user.user_id) |
|
1141 | 1142 | # then we set this user is logged in |
|
1142 | 1143 | self.is_authenticated = True |
|
1143 | 1144 | else: |
|
1144 | 1145 | log.debug('default user is NOT active') |
|
1145 | 1146 | # in case of disabled anonymous user we reset some of the |
|
1146 | 1147 | # parameters so such user is "corrupted", skipping the fill_data |
|
1147 | 1148 | for attr in ['user_id', 'username', 'admin', 'active']: |
|
1148 | 1149 | setattr(self, attr, None) |
|
1149 | 1150 | self.is_authenticated = False |
|
1150 | 1151 | |
|
1151 | 1152 | if not self.username: |
|
1152 | 1153 | self.username = 'None' |
|
1153 | 1154 | |
|
1154 | 1155 | log.debug('AuthUser: propagated user is now %s', self) |
|
1155 | 1156 | |
|
1156 | 1157 | def get_perms(self, user, scope=None, explicit=True, algo='higherwin', |
|
1157 | 1158 | calculate_super_admin=False, cache=None): |
|
1158 | 1159 | """ |
|
1159 | 1160 | Fills user permission attribute with permissions taken from database |
|
1160 | 1161 | works for permissions given for repositories, and for permissions that |
|
1161 | 1162 | are granted to groups |
|
1162 | 1163 | |
|
1163 | 1164 | :param user: instance of User object from database |
|
1164 | 1165 | :param explicit: In case there are permissions both for user and a group |
|
1165 | 1166 | that user is part of, explicit flag will defiine if user will |
|
1166 | 1167 | explicitly override permissions from group, if it's False it will |
|
1167 | 1168 | make decision based on the algo |
|
1168 | 1169 | :param algo: algorithm to decide what permission should be choose if |
|
1169 | 1170 | it's multiple defined, eg user in two different groups. It also |
|
1170 | 1171 | decides if explicit flag is turned off how to specify the permission |
|
1171 | 1172 | for case when user is in a group + have defined separate permission |
|
1172 | 1173 | :param calculate_super_admin: calculate permissions for super-admin in the |
|
1173 | 1174 | same way as for regular user without speedups |
|
1174 | 1175 | :param cache: Use caching for calculation, None = let the cache backend decide |
|
1175 | 1176 | """ |
|
1176 | 1177 | user_id = user.user_id |
|
1177 | 1178 | user_is_admin = user.is_admin |
|
1178 | 1179 | |
|
1179 | 1180 | # inheritance of global permissions like create repo/fork repo etc |
|
1180 | 1181 | user_inherit_default_permissions = user.inherit_default_permissions |
|
1181 | 1182 | |
|
1182 | 1183 | cache_seconds = safe_int( |
|
1183 | 1184 | rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time')) |
|
1184 | 1185 | |
|
1185 | 1186 | if cache is None: |
|
1186 | 1187 | # let the backend cache decide |
|
1187 | 1188 | cache_on = cache_seconds > 0 |
|
1188 | 1189 | else: |
|
1189 | 1190 | cache_on = cache |
|
1190 | 1191 | |
|
1191 | 1192 | log.debug( |
|
1192 | 1193 | 'Computing PERMISSION tree for user %s scope `%s` ' |
|
1193 |
'with caching: %s[TTL: %ss]' |
|
|
1194 | 'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0) | |
|
1194 | 1195 | |
|
1195 | 1196 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) |
|
1196 | 1197 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) |
|
1197 | 1198 | |
|
1198 | 1199 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, |
|
1199 | 1200 | condition=cache_on) |
|
1200 | 1201 | def compute_perm_tree(cache_name, |
|
1201 | 1202 | user_id, scope, user_is_admin,user_inherit_default_permissions, |
|
1202 | 1203 | explicit, algo, calculate_super_admin): |
|
1203 | 1204 | return _cached_perms_data( |
|
1204 | 1205 | user_id, scope, user_is_admin, user_inherit_default_permissions, |
|
1205 | 1206 | explicit, algo, calculate_super_admin) |
|
1206 | 1207 | |
|
1207 | 1208 | start = time.time() |
|
1208 | 1209 | result = compute_perm_tree( |
|
1209 | 1210 | 'permissions', user_id, scope, user_is_admin, |
|
1210 | 1211 | user_inherit_default_permissions, explicit, algo, |
|
1211 | 1212 | calculate_super_admin) |
|
1212 | 1213 | |
|
1213 | 1214 | result_repr = [] |
|
1214 | 1215 | for k in result: |
|
1215 | 1216 | result_repr.append((k, len(result[k]))) |
|
1216 | 1217 | total = time.time() - start |
|
1217 |
log.debug('PERMISSION tree for user %s computed in %.3fs: %s' |
|
|
1218 |
user, total, result_repr) |
|
|
1218 | log.debug('PERMISSION tree for user %s computed in %.3fs: %s', | |
|
1219 | user, total, result_repr) | |
|
1219 | 1220 | |
|
1220 | 1221 | return result |
|
1221 | 1222 | |
|
1222 | 1223 | @property |
|
1223 | 1224 | def is_default(self): |
|
1224 | 1225 | return self.username == User.DEFAULT_USER |
|
1225 | 1226 | |
|
1226 | 1227 | @property |
|
1227 | 1228 | def is_admin(self): |
|
1228 | 1229 | return self.admin |
|
1229 | 1230 | |
|
1230 | 1231 | @property |
|
1231 | 1232 | def is_user_object(self): |
|
1232 | 1233 | return self.user_id is not None |
|
1233 | 1234 | |
|
1234 | 1235 | @property |
|
1235 | 1236 | def repositories_admin(self): |
|
1236 | 1237 | """ |
|
1237 | 1238 | Returns list of repositories you're an admin of |
|
1238 | 1239 | """ |
|
1239 | 1240 | return [ |
|
1240 | 1241 | x[0] for x in self.permissions['repositories'].items() |
|
1241 | 1242 | if x[1] == 'repository.admin'] |
|
1242 | 1243 | |
|
1243 | 1244 | @property |
|
1244 | 1245 | def repository_groups_admin(self): |
|
1245 | 1246 | """ |
|
1246 | 1247 | Returns list of repository groups you're an admin of |
|
1247 | 1248 | """ |
|
1248 | 1249 | return [ |
|
1249 | 1250 | x[0] for x in self.permissions['repositories_groups'].items() |
|
1250 | 1251 | if x[1] == 'group.admin'] |
|
1251 | 1252 | |
|
1252 | 1253 | @property |
|
1253 | 1254 | def user_groups_admin(self): |
|
1254 | 1255 | """ |
|
1255 | 1256 | Returns list of user groups you're an admin of |
|
1256 | 1257 | """ |
|
1257 | 1258 | return [ |
|
1258 | 1259 | x[0] for x in self.permissions['user_groups'].items() |
|
1259 | 1260 | if x[1] == 'usergroup.admin'] |
|
1260 | 1261 | |
|
1261 | 1262 | def repo_acl_ids(self, perms=None, name_filter=None, cache=False): |
|
1262 | 1263 | """ |
|
1263 | 1264 | Returns list of repository ids that user have access to based on given |
|
1264 | 1265 | perms. The cache flag should be only used in cases that are used for |
|
1265 | 1266 | display purposes, NOT IN ANY CASE for permission checks. |
|
1266 | 1267 | """ |
|
1267 | 1268 | from rhodecode.model.scm import RepoList |
|
1268 | 1269 | if not perms: |
|
1269 | 1270 | perms = [ |
|
1270 | 1271 | 'repository.read', 'repository.write', 'repository.admin'] |
|
1271 | 1272 | |
|
1272 | 1273 | def _cached_repo_acl(user_id, perm_def, _name_filter): |
|
1273 | 1274 | qry = Repository.query() |
|
1274 | 1275 | if _name_filter: |
|
1275 | 1276 | ilike_expression = u'%{}%'.format(safe_unicode(_name_filter)) |
|
1276 | 1277 | qry = qry.filter( |
|
1277 | 1278 | Repository.repo_name.ilike(ilike_expression)) |
|
1278 | 1279 | |
|
1279 | 1280 | return [x.repo_id for x in |
|
1280 | 1281 | RepoList(qry, perm_set=perm_def)] |
|
1281 | 1282 | |
|
1282 | 1283 | return _cached_repo_acl(self.user_id, perms, name_filter) |
|
1283 | 1284 | |
|
1284 | 1285 | def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False): |
|
1285 | 1286 | """ |
|
1286 | 1287 | Returns list of repository group ids that user have access to based on given |
|
1287 | 1288 | perms. The cache flag should be only used in cases that are used for |
|
1288 | 1289 | display purposes, NOT IN ANY CASE for permission checks. |
|
1289 | 1290 | """ |
|
1290 | 1291 | from rhodecode.model.scm import RepoGroupList |
|
1291 | 1292 | if not perms: |
|
1292 | 1293 | perms = [ |
|
1293 | 1294 | 'group.read', 'group.write', 'group.admin'] |
|
1294 | 1295 | |
|
1295 | 1296 | def _cached_repo_group_acl(user_id, perm_def, _name_filter): |
|
1296 | 1297 | qry = RepoGroup.query() |
|
1297 | 1298 | if _name_filter: |
|
1298 | 1299 | ilike_expression = u'%{}%'.format(safe_unicode(_name_filter)) |
|
1299 | 1300 | qry = qry.filter( |
|
1300 | 1301 | RepoGroup.group_name.ilike(ilike_expression)) |
|
1301 | 1302 | |
|
1302 | 1303 | return [x.group_id for x in |
|
1303 | 1304 | RepoGroupList(qry, perm_set=perm_def)] |
|
1304 | 1305 | |
|
1305 | 1306 | return _cached_repo_group_acl(self.user_id, perms, name_filter) |
|
1306 | 1307 | |
|
1307 | 1308 | def user_group_acl_ids(self, perms=None, name_filter=None, cache=False): |
|
1308 | 1309 | """ |
|
1309 | 1310 | Returns list of user group ids that user have access to based on given |
|
1310 | 1311 | perms. The cache flag should be only used in cases that are used for |
|
1311 | 1312 | display purposes, NOT IN ANY CASE for permission checks. |
|
1312 | 1313 | """ |
|
1313 | 1314 | from rhodecode.model.scm import UserGroupList |
|
1314 | 1315 | if not perms: |
|
1315 | 1316 | perms = [ |
|
1316 | 1317 | 'usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
1317 | 1318 | |
|
1318 | 1319 | def _cached_user_group_acl(user_id, perm_def, name_filter): |
|
1319 | 1320 | qry = UserGroup.query() |
|
1320 | 1321 | if name_filter: |
|
1321 | 1322 | ilike_expression = u'%{}%'.format(safe_unicode(name_filter)) |
|
1322 | 1323 | qry = qry.filter( |
|
1323 | 1324 | UserGroup.users_group_name.ilike(ilike_expression)) |
|
1324 | 1325 | |
|
1325 | 1326 | return [x.users_group_id for x in |
|
1326 | 1327 | UserGroupList(qry, perm_set=perm_def)] |
|
1327 | 1328 | |
|
1328 | 1329 | return _cached_user_group_acl(self.user_id, perms, name_filter) |
|
1329 | 1330 | |
|
1330 | 1331 | @property |
|
1331 | 1332 | def ip_allowed(self): |
|
1332 | 1333 | """ |
|
1333 | 1334 | Checks if ip_addr used in constructor is allowed from defined list of |
|
1334 | 1335 | allowed ip_addresses for user |
|
1335 | 1336 | |
|
1336 | 1337 | :returns: boolean, True if ip is in allowed ip range |
|
1337 | 1338 | """ |
|
1338 | 1339 | # check IP |
|
1339 | 1340 | inherit = self.inherit_default_permissions |
|
1340 | 1341 | return AuthUser.check_ip_allowed(self.user_id, self.ip_addr, |
|
1341 | 1342 | inherit_from_default=inherit) |
|
1342 | 1343 | @property |
|
1343 | 1344 | def personal_repo_group(self): |
|
1344 | 1345 | return RepoGroup.get_user_personal_repo_group(self.user_id) |
|
1345 | 1346 | |
|
1346 | 1347 | @LazyProperty |
|
1347 | 1348 | def feed_token(self): |
|
1348 | 1349 | return self.get_instance().feed_token |
|
1349 | 1350 | |
|
1350 | 1351 | @classmethod |
|
1351 | 1352 | def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default): |
|
1352 | 1353 | allowed_ips = AuthUser.get_allowed_ips( |
|
1353 | 1354 | user_id, cache=True, inherit_from_default=inherit_from_default) |
|
1354 | 1355 | if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips): |
|
1355 |
log.debug('IP:%s for user %s is in range of %s' |
|
|
1356 |
ip_addr, user_id, allowed_ips) |
|
|
1356 | log.debug('IP:%s for user %s is in range of %s', | |
|
1357 | ip_addr, user_id, allowed_ips) | |
|
1357 | 1358 | return True |
|
1358 | 1359 | else: |
|
1359 | 1360 | log.info('Access for IP:%s forbidden for user %s, ' |
|
1360 |
'not in %s' |
|
|
1361 | 'not in %s', ip_addr, user_id, allowed_ips) | |
|
1361 | 1362 | return False |
|
1362 | 1363 | |
|
1363 | 1364 | def get_branch_permissions(self, repo_name, perms=None): |
|
1364 | 1365 | perms = perms or self.permissions_with_scope({'repo_name': repo_name}) |
|
1365 | 1366 | branch_perms = perms.get('repository_branches', {}) |
|
1366 | 1367 | if not branch_perms: |
|
1367 | 1368 | return {} |
|
1368 | 1369 | repo_branch_perms = branch_perms.get(repo_name) |
|
1369 | 1370 | return repo_branch_perms or {} |
|
1370 | 1371 | |
|
1371 | 1372 | def get_rule_and_branch_permission(self, repo_name, branch_name): |
|
1372 | 1373 | """ |
|
1373 | 1374 | Check if this AuthUser has defined any permissions for branches. If any of |
|
1374 | 1375 | the rules match in order, we return the matching permissions |
|
1375 | 1376 | """ |
|
1376 | 1377 | |
|
1377 | 1378 | rule = default_perm = '' |
|
1378 | 1379 | |
|
1379 | 1380 | repo_branch_perms = self.get_branch_permissions(repo_name=repo_name) |
|
1380 | 1381 | if not repo_branch_perms: |
|
1381 | 1382 | return rule, default_perm |
|
1382 | 1383 | |
|
1383 | 1384 | # now calculate the permissions |
|
1384 | 1385 | for pattern, branch_perm in repo_branch_perms.items(): |
|
1385 | 1386 | if fnmatch.fnmatch(branch_name, pattern): |
|
1386 | 1387 | rule = '`{}`=>{}'.format(pattern, branch_perm) |
|
1387 | 1388 | return rule, branch_perm |
|
1388 | 1389 | |
|
1389 | 1390 | return rule, default_perm |
|
1390 | 1391 | |
|
1391 | 1392 | def __repr__(self): |
|
1392 | 1393 | return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\ |
|
1393 | 1394 | % (self.user_id, self.username, self.ip_addr, self.is_authenticated) |
|
1394 | 1395 | |
|
1395 | 1396 | def set_authenticated(self, authenticated=True): |
|
1396 | 1397 | if self.user_id != self.anonymous_user.user_id: |
|
1397 | 1398 | self.is_authenticated = authenticated |
|
1398 | 1399 | |
|
1399 | 1400 | def get_cookie_store(self): |
|
1400 | 1401 | return { |
|
1401 | 1402 | 'username': self.username, |
|
1402 | 1403 | 'password': md5(self.password or ''), |
|
1403 | 1404 | 'user_id': self.user_id, |
|
1404 | 1405 | 'is_authenticated': self.is_authenticated |
|
1405 | 1406 | } |
|
1406 | 1407 | |
|
1407 | 1408 | @classmethod |
|
1408 | 1409 | def from_cookie_store(cls, cookie_store): |
|
1409 | 1410 | """ |
|
1410 | 1411 | Creates AuthUser from a cookie store |
|
1411 | 1412 | |
|
1412 | 1413 | :param cls: |
|
1413 | 1414 | :param cookie_store: |
|
1414 | 1415 | """ |
|
1415 | 1416 | user_id = cookie_store.get('user_id') |
|
1416 | 1417 | username = cookie_store.get('username') |
|
1417 | 1418 | api_key = cookie_store.get('api_key') |
|
1418 | 1419 | return AuthUser(user_id, api_key, username) |
|
1419 | 1420 | |
|
1420 | 1421 | @classmethod |
|
1421 | 1422 | def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False): |
|
1422 | 1423 | _set = set() |
|
1423 | 1424 | |
|
1424 | 1425 | if inherit_from_default: |
|
1425 | 1426 | def_user_id = User.get_default_user(cache=True).user_id |
|
1426 | 1427 | default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id) |
|
1427 | 1428 | if cache: |
|
1428 | 1429 | default_ips = default_ips.options( |
|
1429 | 1430 | FromCache("sql_cache_short", "get_user_ips_default")) |
|
1430 | 1431 | |
|
1431 | 1432 | # populate from default user |
|
1432 | 1433 | for ip in default_ips: |
|
1433 | 1434 | try: |
|
1434 | 1435 | _set.add(ip.ip_addr) |
|
1435 | 1436 | except ObjectDeletedError: |
|
1436 | 1437 | # since we use heavy caching sometimes it happens that |
|
1437 | 1438 | # we get deleted objects here, we just skip them |
|
1438 | 1439 | pass |
|
1439 | 1440 | |
|
1440 | 1441 | # NOTE:(marcink) we don't want to load any rules for empty |
|
1441 | 1442 | # user_id which is the case of access of non logged users when anonymous |
|
1442 | 1443 | # access is disabled |
|
1443 | 1444 | user_ips = [] |
|
1444 | 1445 | if user_id: |
|
1445 | 1446 | user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id) |
|
1446 | 1447 | if cache: |
|
1447 | 1448 | user_ips = user_ips.options( |
|
1448 | 1449 | FromCache("sql_cache_short", "get_user_ips_%s" % user_id)) |
|
1449 | 1450 | |
|
1450 | 1451 | for ip in user_ips: |
|
1451 | 1452 | try: |
|
1452 | 1453 | _set.add(ip.ip_addr) |
|
1453 | 1454 | except ObjectDeletedError: |
|
1454 | 1455 | # since we use heavy caching sometimes it happens that we get |
|
1455 | 1456 | # deleted objects here, we just skip them |
|
1456 | 1457 | pass |
|
1457 | 1458 | return _set or {ip for ip in ['0.0.0.0/0', '::/0']} |
|
1458 | 1459 | |
|
1459 | 1460 | |
|
1460 | 1461 | def set_available_permissions(settings): |
|
1461 | 1462 | """ |
|
1462 | 1463 | This function will propagate pyramid settings with all available defined |
|
1463 | 1464 | permission given in db. We don't want to check each time from db for new |
|
1464 | 1465 | permissions since adding a new permission also requires application restart |
|
1465 | 1466 | ie. to decorate new views with the newly created permission |
|
1466 | 1467 | |
|
1467 | 1468 | :param settings: current pyramid registry.settings |
|
1468 | 1469 | |
|
1469 | 1470 | """ |
|
1470 | 1471 | log.debug('auth: getting information about all available permissions') |
|
1471 | 1472 | try: |
|
1472 | 1473 | sa = meta.Session |
|
1473 | 1474 | all_perms = sa.query(Permission).all() |
|
1474 | 1475 | settings.setdefault('available_permissions', |
|
1475 | 1476 | [x.permission_name for x in all_perms]) |
|
1476 | 1477 | log.debug('auth: set available permissions') |
|
1477 | 1478 | except Exception: |
|
1478 | 1479 | log.exception('Failed to fetch permissions from the database.') |
|
1479 | 1480 | raise |
|
1480 | 1481 | |
|
1481 | 1482 | |
|
1482 | 1483 | def get_csrf_token(session, force_new=False, save_if_missing=True): |
|
1483 | 1484 | """ |
|
1484 | 1485 | Return the current authentication token, creating one if one doesn't |
|
1485 | 1486 | already exist and the save_if_missing flag is present. |
|
1486 | 1487 | |
|
1487 | 1488 | :param session: pass in the pyramid session, else we use the global ones |
|
1488 | 1489 | :param force_new: force to re-generate the token and store it in session |
|
1489 | 1490 | :param save_if_missing: save the newly generated token if it's missing in |
|
1490 | 1491 | session |
|
1491 | 1492 | """ |
|
1492 | 1493 | # NOTE(marcink): probably should be replaced with below one from pyramid 1.9 |
|
1493 | 1494 | # from pyramid.csrf import get_csrf_token |
|
1494 | 1495 | |
|
1495 | 1496 | if (csrf_token_key not in session and save_if_missing) or force_new: |
|
1496 | 1497 | token = hashlib.sha1(str(random.getrandbits(128))).hexdigest() |
|
1497 | 1498 | session[csrf_token_key] = token |
|
1498 | 1499 | if hasattr(session, 'save'): |
|
1499 | 1500 | session.save() |
|
1500 | 1501 | return session.get(csrf_token_key) |
|
1501 | 1502 | |
|
1502 | 1503 | |
|
1503 | 1504 | def get_request(perm_class_instance): |
|
1504 | 1505 | from pyramid.threadlocal import get_current_request |
|
1505 | 1506 | pyramid_request = get_current_request() |
|
1506 | 1507 | return pyramid_request |
|
1507 | 1508 | |
|
1508 | 1509 | |
|
1509 | 1510 | # CHECK DECORATORS |
|
1510 | 1511 | class CSRFRequired(object): |
|
1511 | 1512 | """ |
|
1512 | 1513 | Decorator for authenticating a form |
|
1513 | 1514 | |
|
1514 | 1515 | This decorator uses an authorization token stored in the client's |
|
1515 | 1516 | session for prevention of certain Cross-site request forgery (CSRF) |
|
1516 | 1517 | attacks (See |
|
1517 | 1518 | http://en.wikipedia.org/wiki/Cross-site_request_forgery for more |
|
1518 | 1519 | information). |
|
1519 | 1520 | |
|
1520 | 1521 | For use with the ``webhelpers.secure_form`` helper functions. |
|
1521 | 1522 | |
|
1522 | 1523 | """ |
|
1523 | 1524 | def __init__(self, token=csrf_token_key, header='X-CSRF-Token', |
|
1524 | 1525 | except_methods=None): |
|
1525 | 1526 | self.token = token |
|
1526 | 1527 | self.header = header |
|
1527 | 1528 | self.except_methods = except_methods or [] |
|
1528 | 1529 | |
|
1529 | 1530 | def __call__(self, func): |
|
1530 | 1531 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1531 | 1532 | |
|
1532 | 1533 | def _get_csrf(self, _request): |
|
1533 | 1534 | return _request.POST.get(self.token, _request.headers.get(self.header)) |
|
1534 | 1535 | |
|
1535 | 1536 | def check_csrf(self, _request, cur_token): |
|
1536 | 1537 | supplied_token = self._get_csrf(_request) |
|
1537 | 1538 | return supplied_token and supplied_token == cur_token |
|
1538 | 1539 | |
|
1539 | 1540 | def _get_request(self): |
|
1540 | 1541 | return get_request(self) |
|
1541 | 1542 | |
|
1542 | 1543 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1543 | 1544 | request = self._get_request() |
|
1544 | 1545 | |
|
1545 | 1546 | if request.method in self.except_methods: |
|
1546 | 1547 | return func(*fargs, **fkwargs) |
|
1547 | 1548 | |
|
1548 | 1549 | cur_token = get_csrf_token(request.session, save_if_missing=False) |
|
1549 | 1550 | if self.check_csrf(request, cur_token): |
|
1550 | 1551 | if request.POST.get(self.token): |
|
1551 | 1552 | del request.POST[self.token] |
|
1552 | 1553 | return func(*fargs, **fkwargs) |
|
1553 | 1554 | else: |
|
1554 | 1555 | reason = 'token-missing' |
|
1555 | 1556 | supplied_token = self._get_csrf(request) |
|
1556 | 1557 | if supplied_token and cur_token != supplied_token: |
|
1557 | 1558 | reason = 'token-mismatch [%s:%s]' % ( |
|
1558 | 1559 | cur_token or ''[:6], supplied_token or ''[:6]) |
|
1559 | 1560 | |
|
1560 | 1561 | csrf_message = \ |
|
1561 | 1562 | ("Cross-site request forgery detected, request denied. See " |
|
1562 | 1563 | "http://en.wikipedia.org/wiki/Cross-site_request_forgery for " |
|
1563 | 1564 | "more information.") |
|
1564 | 1565 | log.warn('Cross-site request forgery detected, request %r DENIED: %s ' |
|
1565 | 1566 | 'REMOTE_ADDR:%s, HEADERS:%s' % ( |
|
1566 | 1567 | request, reason, request.remote_addr, request.headers)) |
|
1567 | 1568 | |
|
1568 | 1569 | raise HTTPForbidden(explanation=csrf_message) |
|
1569 | 1570 | |
|
1570 | 1571 | |
|
1571 | 1572 | class LoginRequired(object): |
|
1572 | 1573 | """ |
|
1573 | 1574 | Must be logged in to execute this function else |
|
1574 | 1575 | redirect to login page |
|
1575 | 1576 | |
|
1576 | 1577 | :param api_access: if enabled this checks only for valid auth token |
|
1577 | 1578 | and grants access based on valid token |
|
1578 | 1579 | """ |
|
1579 | 1580 | def __init__(self, auth_token_access=None): |
|
1580 | 1581 | self.auth_token_access = auth_token_access |
|
1581 | 1582 | |
|
1582 | 1583 | def __call__(self, func): |
|
1583 | 1584 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1584 | 1585 | |
|
1585 | 1586 | def _get_request(self): |
|
1586 | 1587 | return get_request(self) |
|
1587 | 1588 | |
|
1588 | 1589 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1589 | 1590 | from rhodecode.lib import helpers as h |
|
1590 | 1591 | cls = fargs[0] |
|
1591 | 1592 | user = cls._rhodecode_user |
|
1592 | 1593 | request = self._get_request() |
|
1593 | 1594 | _ = request.translate |
|
1594 | 1595 | |
|
1595 | 1596 | loc = "%s:%s" % (cls.__class__.__name__, func.__name__) |
|
1596 |
log.debug('Starting login restriction checks for user: %s' |
|
|
1597 | log.debug('Starting login restriction checks for user: %s', user) | |
|
1597 | 1598 | # check if our IP is allowed |
|
1598 | 1599 | ip_access_valid = True |
|
1599 | 1600 | if not user.ip_allowed: |
|
1600 | 1601 | h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))), |
|
1601 | 1602 | category='warning') |
|
1602 | 1603 | ip_access_valid = False |
|
1603 | 1604 | |
|
1604 | 1605 | # check if we used an APIKEY and it's a valid one |
|
1605 | 1606 | # defined white-list of controllers which API access will be enabled |
|
1606 | 1607 | _auth_token = request.GET.get( |
|
1607 | 1608 | 'auth_token', '') or request.GET.get('api_key', '') |
|
1608 | 1609 | auth_token_access_valid = allowed_auth_token_access( |
|
1609 | 1610 | loc, auth_token=_auth_token) |
|
1610 | 1611 | |
|
1611 | 1612 | # explicit controller is enabled or API is in our whitelist |
|
1612 | 1613 | if self.auth_token_access or auth_token_access_valid: |
|
1613 |
log.debug('Checking AUTH TOKEN access for %s' |
|
|
1614 | log.debug('Checking AUTH TOKEN access for %s', cls) | |
|
1614 | 1615 | db_user = user.get_instance() |
|
1615 | 1616 | |
|
1616 | 1617 | if db_user: |
|
1617 | 1618 | if self.auth_token_access: |
|
1618 | 1619 | roles = self.auth_token_access |
|
1619 | 1620 | else: |
|
1620 | 1621 | roles = [UserApiKeys.ROLE_HTTP] |
|
1621 | 1622 | token_match = db_user.authenticate_by_token( |
|
1622 | 1623 | _auth_token, roles=roles) |
|
1623 | 1624 | else: |
|
1624 | 1625 | log.debug('Unable to fetch db instance for auth user: %s', user) |
|
1625 | 1626 | token_match = False |
|
1626 | 1627 | |
|
1627 | 1628 | if _auth_token and token_match: |
|
1628 | 1629 | auth_token_access_valid = True |
|
1629 |
log.debug('AUTH TOKEN ****%s is VALID' |
|
|
1630 | log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:]) | |
|
1630 | 1631 | else: |
|
1631 | 1632 | auth_token_access_valid = False |
|
1632 | 1633 | if not _auth_token: |
|
1633 | 1634 | log.debug("AUTH TOKEN *NOT* present in request") |
|
1634 | 1635 | else: |
|
1635 | log.warning( | |
|
1636 | "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:]) | |
|
1637 | ||
|
1638 | log.debug('Checking if %s is authenticated @ %s' % (user.username, loc)) | |
|
1636 | log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:]) | |
|
1637 | ||
|
1638 | log.debug('Checking if %s is authenticated @ %s', user.username, loc) | |
|
1639 | 1639 | reason = 'RHODECODE_AUTH' if user.is_authenticated \ |
|
1640 | 1640 | else 'AUTH_TOKEN_AUTH' |
|
1641 | 1641 | |
|
1642 | 1642 | if ip_access_valid and ( |
|
1643 | 1643 | user.is_authenticated or auth_token_access_valid): |
|
1644 | log.info( | |
|
1645 | 'user %s authenticating with:%s IS authenticated on func %s' | |
|
1646 | % (user, reason, loc)) | |
|
1644 | log.info('user %s authenticating with:%s IS authenticated on func %s', | |
|
1645 | user, reason, loc) | |
|
1647 | 1646 | |
|
1648 | 1647 | return func(*fargs, **fkwargs) |
|
1649 | 1648 | else: |
|
1650 | 1649 | log.warning( |
|
1651 | 1650 | 'user %s authenticating with:%s NOT authenticated on ' |
|
1652 | 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s' | |
|
1653 |
|
|
|
1654 | auth_token_access_valid)) | |
|
1651 | 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s', | |
|
1652 | user, reason, loc, ip_access_valid, auth_token_access_valid) | |
|
1655 | 1653 | # we preserve the get PARAM |
|
1656 | 1654 | came_from = get_came_from(request) |
|
1657 | 1655 | |
|
1658 |
log.debug('redirecting to login page with %s' |
|
|
1656 | log.debug('redirecting to login page with %s', came_from) | |
|
1659 | 1657 | raise HTTPFound( |
|
1660 | 1658 | h.route_path('login', _query={'came_from': came_from})) |
|
1661 | 1659 | |
|
1662 | 1660 | |
|
1663 | 1661 | class NotAnonymous(object): |
|
1664 | 1662 | """ |
|
1665 | 1663 | Must be logged in to execute this function else |
|
1666 | 1664 | redirect to login page |
|
1667 | 1665 | """ |
|
1668 | 1666 | |
|
1669 | 1667 | def __call__(self, func): |
|
1670 | 1668 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1671 | 1669 | |
|
1672 | 1670 | def _get_request(self): |
|
1673 | 1671 | return get_request(self) |
|
1674 | 1672 | |
|
1675 | 1673 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1676 | 1674 | import rhodecode.lib.helpers as h |
|
1677 | 1675 | cls = fargs[0] |
|
1678 | 1676 | self.user = cls._rhodecode_user |
|
1679 | 1677 | request = self._get_request() |
|
1680 | 1678 | _ = request.translate |
|
1681 |
log.debug('Checking if user is not anonymous @%s' |
|
|
1679 | log.debug('Checking if user is not anonymous @%s', cls) | |
|
1682 | 1680 | |
|
1683 | 1681 | anonymous = self.user.username == User.DEFAULT_USER |
|
1684 | 1682 | |
|
1685 | 1683 | if anonymous: |
|
1686 | 1684 | came_from = get_came_from(request) |
|
1687 | 1685 | h.flash(_('You need to be a registered user to ' |
|
1688 | 1686 | 'perform this action'), |
|
1689 | 1687 | category='warning') |
|
1690 | 1688 | raise HTTPFound( |
|
1691 | 1689 | h.route_path('login', _query={'came_from': came_from})) |
|
1692 | 1690 | else: |
|
1693 | 1691 | return func(*fargs, **fkwargs) |
|
1694 | 1692 | |
|
1695 | 1693 | |
|
1696 | 1694 | class PermsDecorator(object): |
|
1697 | 1695 | """ |
|
1698 | 1696 | Base class for controller decorators, we extract the current user from |
|
1699 | 1697 | the class itself, which has it stored in base controllers |
|
1700 | 1698 | """ |
|
1701 | 1699 | |
|
1702 | 1700 | def __init__(self, *required_perms): |
|
1703 | 1701 | self.required_perms = set(required_perms) |
|
1704 | 1702 | |
|
1705 | 1703 | def __call__(self, func): |
|
1706 | 1704 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1707 | 1705 | |
|
1708 | 1706 | def _get_request(self): |
|
1709 | 1707 | return get_request(self) |
|
1710 | 1708 | |
|
1711 | 1709 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1712 | 1710 | import rhodecode.lib.helpers as h |
|
1713 | 1711 | cls = fargs[0] |
|
1714 | 1712 | _user = cls._rhodecode_user |
|
1715 | 1713 | request = self._get_request() |
|
1716 | 1714 | _ = request.translate |
|
1717 | 1715 | |
|
1718 | 1716 | log.debug('checking %s permissions %s for %s %s', |
|
1719 | 1717 | self.__class__.__name__, self.required_perms, cls, _user) |
|
1720 | 1718 | |
|
1721 | 1719 | if self.check_permissions(_user): |
|
1722 | 1720 | log.debug('Permission granted for %s %s', cls, _user) |
|
1723 | 1721 | return func(*fargs, **fkwargs) |
|
1724 | 1722 | |
|
1725 | 1723 | else: |
|
1726 | 1724 | log.debug('Permission denied for %s %s', cls, _user) |
|
1727 | 1725 | anonymous = _user.username == User.DEFAULT_USER |
|
1728 | 1726 | |
|
1729 | 1727 | if anonymous: |
|
1730 | 1728 | came_from = get_came_from(self._get_request()) |
|
1731 | 1729 | h.flash(_('You need to be signed in to view this page'), |
|
1732 | 1730 | category='warning') |
|
1733 | 1731 | raise HTTPFound( |
|
1734 | 1732 | h.route_path('login', _query={'came_from': came_from})) |
|
1735 | 1733 | |
|
1736 | 1734 | else: |
|
1737 | 1735 | # redirect with 404 to prevent resource discovery |
|
1738 | 1736 | raise HTTPNotFound() |
|
1739 | 1737 | |
|
1740 | 1738 | def check_permissions(self, user): |
|
1741 | 1739 | """Dummy function for overriding""" |
|
1742 | 1740 | raise NotImplementedError( |
|
1743 | 1741 | 'You have to write this function in child class') |
|
1744 | 1742 | |
|
1745 | 1743 | |
|
1746 | 1744 | class HasPermissionAllDecorator(PermsDecorator): |
|
1747 | 1745 | """ |
|
1748 | 1746 | Checks for access permission for all given predicates. All of them |
|
1749 | 1747 | have to be meet in order to fulfill the request |
|
1750 | 1748 | """ |
|
1751 | 1749 | |
|
1752 | 1750 | def check_permissions(self, user): |
|
1753 | 1751 | perms = user.permissions_with_scope({}) |
|
1754 | 1752 | if self.required_perms.issubset(perms['global']): |
|
1755 | 1753 | return True |
|
1756 | 1754 | return False |
|
1757 | 1755 | |
|
1758 | 1756 | |
|
1759 | 1757 | class HasPermissionAnyDecorator(PermsDecorator): |
|
1760 | 1758 | """ |
|
1761 | 1759 | Checks for access permission for any of given predicates. In order to |
|
1762 | 1760 | fulfill the request any of predicates must be meet |
|
1763 | 1761 | """ |
|
1764 | 1762 | |
|
1765 | 1763 | def check_permissions(self, user): |
|
1766 | 1764 | perms = user.permissions_with_scope({}) |
|
1767 | 1765 | if self.required_perms.intersection(perms['global']): |
|
1768 | 1766 | return True |
|
1769 | 1767 | return False |
|
1770 | 1768 | |
|
1771 | 1769 | |
|
1772 | 1770 | class HasRepoPermissionAllDecorator(PermsDecorator): |
|
1773 | 1771 | """ |
|
1774 | 1772 | Checks for access permission for all given predicates for specific |
|
1775 | 1773 | repository. All of them have to be meet in order to fulfill the request |
|
1776 | 1774 | """ |
|
1777 | 1775 | def _get_repo_name(self): |
|
1778 | 1776 | _request = self._get_request() |
|
1779 | 1777 | return get_repo_slug(_request) |
|
1780 | 1778 | |
|
1781 | 1779 | def check_permissions(self, user): |
|
1782 | 1780 | perms = user.permissions |
|
1783 | 1781 | repo_name = self._get_repo_name() |
|
1784 | 1782 | |
|
1785 | 1783 | try: |
|
1786 | 1784 | user_perms = {perms['repositories'][repo_name]} |
|
1787 | 1785 | except KeyError: |
|
1788 | 1786 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1789 | 1787 | repo_name) |
|
1790 | 1788 | return False |
|
1791 | 1789 | |
|
1792 | 1790 | log.debug('checking `%s` permissions for repo `%s`', |
|
1793 | 1791 | user_perms, repo_name) |
|
1794 | 1792 | if self.required_perms.issubset(user_perms): |
|
1795 | 1793 | return True |
|
1796 | 1794 | return False |
|
1797 | 1795 | |
|
1798 | 1796 | |
|
1799 | 1797 | class HasRepoPermissionAnyDecorator(PermsDecorator): |
|
1800 | 1798 | """ |
|
1801 | 1799 | Checks for access permission for any of given predicates for specific |
|
1802 | 1800 | repository. In order to fulfill the request any of predicates must be meet |
|
1803 | 1801 | """ |
|
1804 | 1802 | def _get_repo_name(self): |
|
1805 | 1803 | _request = self._get_request() |
|
1806 | 1804 | return get_repo_slug(_request) |
|
1807 | 1805 | |
|
1808 | 1806 | def check_permissions(self, user): |
|
1809 | 1807 | perms = user.permissions |
|
1810 | 1808 | repo_name = self._get_repo_name() |
|
1811 | 1809 | |
|
1812 | 1810 | try: |
|
1813 | 1811 | user_perms = {perms['repositories'][repo_name]} |
|
1814 | 1812 | except KeyError: |
|
1815 | 1813 | log.debug( |
|
1816 | 1814 | 'cannot locate repo with name: `%s` in permissions defs', |
|
1817 | 1815 | repo_name) |
|
1818 | 1816 | return False |
|
1819 | 1817 | |
|
1820 | 1818 | log.debug('checking `%s` permissions for repo `%s`', |
|
1821 | 1819 | user_perms, repo_name) |
|
1822 | 1820 | if self.required_perms.intersection(user_perms): |
|
1823 | 1821 | return True |
|
1824 | 1822 | return False |
|
1825 | 1823 | |
|
1826 | 1824 | |
|
1827 | 1825 | class HasRepoGroupPermissionAllDecorator(PermsDecorator): |
|
1828 | 1826 | """ |
|
1829 | 1827 | Checks for access permission for all given predicates for specific |
|
1830 | 1828 | repository group. All of them have to be meet in order to |
|
1831 | 1829 | fulfill the request |
|
1832 | 1830 | """ |
|
1833 | 1831 | def _get_repo_group_name(self): |
|
1834 | 1832 | _request = self._get_request() |
|
1835 | 1833 | return get_repo_group_slug(_request) |
|
1836 | 1834 | |
|
1837 | 1835 | def check_permissions(self, user): |
|
1838 | 1836 | perms = user.permissions |
|
1839 | 1837 | group_name = self._get_repo_group_name() |
|
1840 | 1838 | try: |
|
1841 | 1839 | user_perms = {perms['repositories_groups'][group_name]} |
|
1842 | 1840 | except KeyError: |
|
1843 | 1841 | log.debug( |
|
1844 | 1842 | 'cannot locate repo group with name: `%s` in permissions defs', |
|
1845 | 1843 | group_name) |
|
1846 | 1844 | return False |
|
1847 | 1845 | |
|
1848 | 1846 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1849 | 1847 | user_perms, group_name) |
|
1850 | 1848 | if self.required_perms.issubset(user_perms): |
|
1851 | 1849 | return True |
|
1852 | 1850 | return False |
|
1853 | 1851 | |
|
1854 | 1852 | |
|
1855 | 1853 | class HasRepoGroupPermissionAnyDecorator(PermsDecorator): |
|
1856 | 1854 | """ |
|
1857 | 1855 | Checks for access permission for any of given predicates for specific |
|
1858 | 1856 | repository group. In order to fulfill the request any |
|
1859 | 1857 | of predicates must be met |
|
1860 | 1858 | """ |
|
1861 | 1859 | def _get_repo_group_name(self): |
|
1862 | 1860 | _request = self._get_request() |
|
1863 | 1861 | return get_repo_group_slug(_request) |
|
1864 | 1862 | |
|
1865 | 1863 | def check_permissions(self, user): |
|
1866 | 1864 | perms = user.permissions |
|
1867 | 1865 | group_name = self._get_repo_group_name() |
|
1868 | 1866 | |
|
1869 | 1867 | try: |
|
1870 | 1868 | user_perms = {perms['repositories_groups'][group_name]} |
|
1871 | 1869 | except KeyError: |
|
1872 | 1870 | log.debug( |
|
1873 | 1871 | 'cannot locate repo group with name: `%s` in permissions defs', |
|
1874 | 1872 | group_name) |
|
1875 | 1873 | return False |
|
1876 | 1874 | |
|
1877 | 1875 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1878 | 1876 | user_perms, group_name) |
|
1879 | 1877 | if self.required_perms.intersection(user_perms): |
|
1880 | 1878 | return True |
|
1881 | 1879 | return False |
|
1882 | 1880 | |
|
1883 | 1881 | |
|
1884 | 1882 | class HasUserGroupPermissionAllDecorator(PermsDecorator): |
|
1885 | 1883 | """ |
|
1886 | 1884 | Checks for access permission for all given predicates for specific |
|
1887 | 1885 | user group. All of them have to be meet in order to fulfill the request |
|
1888 | 1886 | """ |
|
1889 | 1887 | def _get_user_group_name(self): |
|
1890 | 1888 | _request = self._get_request() |
|
1891 | 1889 | return get_user_group_slug(_request) |
|
1892 | 1890 | |
|
1893 | 1891 | def check_permissions(self, user): |
|
1894 | 1892 | perms = user.permissions |
|
1895 | 1893 | group_name = self._get_user_group_name() |
|
1896 | 1894 | try: |
|
1897 | 1895 | user_perms = {perms['user_groups'][group_name]} |
|
1898 | 1896 | except KeyError: |
|
1899 | 1897 | return False |
|
1900 | 1898 | |
|
1901 | 1899 | if self.required_perms.issubset(user_perms): |
|
1902 | 1900 | return True |
|
1903 | 1901 | return False |
|
1904 | 1902 | |
|
1905 | 1903 | |
|
1906 | 1904 | class HasUserGroupPermissionAnyDecorator(PermsDecorator): |
|
1907 | 1905 | """ |
|
1908 | 1906 | Checks for access permission for any of given predicates for specific |
|
1909 | 1907 | user group. In order to fulfill the request any of predicates must be meet |
|
1910 | 1908 | """ |
|
1911 | 1909 | def _get_user_group_name(self): |
|
1912 | 1910 | _request = self._get_request() |
|
1913 | 1911 | return get_user_group_slug(_request) |
|
1914 | 1912 | |
|
1915 | 1913 | def check_permissions(self, user): |
|
1916 | 1914 | perms = user.permissions |
|
1917 | 1915 | group_name = self._get_user_group_name() |
|
1918 | 1916 | try: |
|
1919 | 1917 | user_perms = {perms['user_groups'][group_name]} |
|
1920 | 1918 | except KeyError: |
|
1921 | 1919 | return False |
|
1922 | 1920 | |
|
1923 | 1921 | if self.required_perms.intersection(user_perms): |
|
1924 | 1922 | return True |
|
1925 | 1923 | return False |
|
1926 | 1924 | |
|
1927 | 1925 | |
|
1928 | 1926 | # CHECK FUNCTIONS |
|
1929 | 1927 | class PermsFunction(object): |
|
1930 | 1928 | """Base function for other check functions""" |
|
1931 | 1929 | |
|
1932 | 1930 | def __init__(self, *perms): |
|
1933 | 1931 | self.required_perms = set(perms) |
|
1934 | 1932 | self.repo_name = None |
|
1935 | 1933 | self.repo_group_name = None |
|
1936 | 1934 | self.user_group_name = None |
|
1937 | 1935 | |
|
1938 | 1936 | def __bool__(self): |
|
1939 | 1937 | frame = inspect.currentframe() |
|
1940 | 1938 | stack_trace = traceback.format_stack(frame) |
|
1941 | 1939 | log.error('Checking bool value on a class instance of perm ' |
|
1942 |
'function is not allowed: %s' |
|
|
1940 | 'function is not allowed: %s', ''.join(stack_trace)) | |
|
1943 | 1941 | # rather than throwing errors, here we always return False so if by |
|
1944 | 1942 | # accident someone checks truth for just an instance it will always end |
|
1945 | 1943 | # up in returning False |
|
1946 | 1944 | return False |
|
1947 | 1945 | __nonzero__ = __bool__ |
|
1948 | 1946 | |
|
1949 | 1947 | def __call__(self, check_location='', user=None): |
|
1950 | 1948 | if not user: |
|
1951 | 1949 | log.debug('Using user attribute from global request') |
|
1952 | 1950 | request = self._get_request() |
|
1953 | 1951 | user = request.user |
|
1954 | 1952 | |
|
1955 | 1953 | # init auth user if not already given |
|
1956 | 1954 | if not isinstance(user, AuthUser): |
|
1957 | 1955 | log.debug('Wrapping user %s into AuthUser', user) |
|
1958 | 1956 | user = AuthUser(user.user_id) |
|
1959 | 1957 | |
|
1960 | 1958 | cls_name = self.__class__.__name__ |
|
1961 | 1959 | check_scope = self._get_check_scope(cls_name) |
|
1962 | 1960 | check_location = check_location or 'unspecified location' |
|
1963 | 1961 | |
|
1964 | 1962 | log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name, |
|
1965 | 1963 | self.required_perms, user, check_scope, check_location) |
|
1966 | 1964 | if not user: |
|
1967 | 1965 | log.warning('Empty user given for permission check') |
|
1968 | 1966 | return False |
|
1969 | 1967 | |
|
1970 | 1968 | if self.check_permissions(user): |
|
1971 | 1969 | log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s', |
|
1972 | 1970 | check_scope, user, check_location) |
|
1973 | 1971 | return True |
|
1974 | 1972 | |
|
1975 | 1973 | else: |
|
1976 | 1974 | log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s', |
|
1977 | 1975 | check_scope, user, check_location) |
|
1978 | 1976 | return False |
|
1979 | 1977 | |
|
1980 | 1978 | def _get_request(self): |
|
1981 | 1979 | return get_request(self) |
|
1982 | 1980 | |
|
1983 | 1981 | def _get_check_scope(self, cls_name): |
|
1984 | 1982 | return { |
|
1985 | 1983 | 'HasPermissionAll': 'GLOBAL', |
|
1986 | 1984 | 'HasPermissionAny': 'GLOBAL', |
|
1987 | 1985 | 'HasRepoPermissionAll': 'repo:%s' % self.repo_name, |
|
1988 | 1986 | 'HasRepoPermissionAny': 'repo:%s' % self.repo_name, |
|
1989 | 1987 | 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name, |
|
1990 | 1988 | 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name, |
|
1991 | 1989 | 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name, |
|
1992 | 1990 | 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name, |
|
1993 | 1991 | }.get(cls_name, '?:%s' % cls_name) |
|
1994 | 1992 | |
|
1995 | 1993 | def check_permissions(self, user): |
|
1996 | 1994 | """Dummy function for overriding""" |
|
1997 | 1995 | raise Exception('You have to write this function in child class') |
|
1998 | 1996 | |
|
1999 | 1997 | |
|
2000 | 1998 | class HasPermissionAll(PermsFunction): |
|
2001 | 1999 | def check_permissions(self, user): |
|
2002 | 2000 | perms = user.permissions_with_scope({}) |
|
2003 | 2001 | if self.required_perms.issubset(perms.get('global')): |
|
2004 | 2002 | return True |
|
2005 | 2003 | return False |
|
2006 | 2004 | |
|
2007 | 2005 | |
|
2008 | 2006 | class HasPermissionAny(PermsFunction): |
|
2009 | 2007 | def check_permissions(self, user): |
|
2010 | 2008 | perms = user.permissions_with_scope({}) |
|
2011 | 2009 | if self.required_perms.intersection(perms.get('global')): |
|
2012 | 2010 | return True |
|
2013 | 2011 | return False |
|
2014 | 2012 | |
|
2015 | 2013 | |
|
2016 | 2014 | class HasRepoPermissionAll(PermsFunction): |
|
2017 | 2015 | def __call__(self, repo_name=None, check_location='', user=None): |
|
2018 | 2016 | self.repo_name = repo_name |
|
2019 | 2017 | return super(HasRepoPermissionAll, self).__call__(check_location, user) |
|
2020 | 2018 | |
|
2021 | 2019 | def _get_repo_name(self): |
|
2022 | 2020 | if not self.repo_name: |
|
2023 | 2021 | _request = self._get_request() |
|
2024 | 2022 | self.repo_name = get_repo_slug(_request) |
|
2025 | 2023 | return self.repo_name |
|
2026 | 2024 | |
|
2027 | 2025 | def check_permissions(self, user): |
|
2028 | 2026 | self.repo_name = self._get_repo_name() |
|
2029 | 2027 | perms = user.permissions |
|
2030 | 2028 | try: |
|
2031 | 2029 | user_perms = {perms['repositories'][self.repo_name]} |
|
2032 | 2030 | except KeyError: |
|
2033 | 2031 | return False |
|
2034 | 2032 | if self.required_perms.issubset(user_perms): |
|
2035 | 2033 | return True |
|
2036 | 2034 | return False |
|
2037 | 2035 | |
|
2038 | 2036 | |
|
2039 | 2037 | class HasRepoPermissionAny(PermsFunction): |
|
2040 | 2038 | def __call__(self, repo_name=None, check_location='', user=None): |
|
2041 | 2039 | self.repo_name = repo_name |
|
2042 | 2040 | return super(HasRepoPermissionAny, self).__call__(check_location, user) |
|
2043 | 2041 | |
|
2044 | 2042 | def _get_repo_name(self): |
|
2045 | 2043 | if not self.repo_name: |
|
2046 | 2044 | _request = self._get_request() |
|
2047 | 2045 | self.repo_name = get_repo_slug(_request) |
|
2048 | 2046 | return self.repo_name |
|
2049 | 2047 | |
|
2050 | 2048 | def check_permissions(self, user): |
|
2051 | 2049 | self.repo_name = self._get_repo_name() |
|
2052 | 2050 | perms = user.permissions |
|
2053 | 2051 | try: |
|
2054 | 2052 | user_perms = {perms['repositories'][self.repo_name]} |
|
2055 | 2053 | except KeyError: |
|
2056 | 2054 | return False |
|
2057 | 2055 | if self.required_perms.intersection(user_perms): |
|
2058 | 2056 | return True |
|
2059 | 2057 | return False |
|
2060 | 2058 | |
|
2061 | 2059 | |
|
2062 | 2060 | class HasRepoGroupPermissionAny(PermsFunction): |
|
2063 | 2061 | def __call__(self, group_name=None, check_location='', user=None): |
|
2064 | 2062 | self.repo_group_name = group_name |
|
2065 | 2063 | return super(HasRepoGroupPermissionAny, self).__call__( |
|
2066 | 2064 | check_location, user) |
|
2067 | 2065 | |
|
2068 | 2066 | def check_permissions(self, user): |
|
2069 | 2067 | perms = user.permissions |
|
2070 | 2068 | try: |
|
2071 | 2069 | user_perms = {perms['repositories_groups'][self.repo_group_name]} |
|
2072 | 2070 | except KeyError: |
|
2073 | 2071 | return False |
|
2074 | 2072 | if self.required_perms.intersection(user_perms): |
|
2075 | 2073 | return True |
|
2076 | 2074 | return False |
|
2077 | 2075 | |
|
2078 | 2076 | |
|
2079 | 2077 | class HasRepoGroupPermissionAll(PermsFunction): |
|
2080 | 2078 | def __call__(self, group_name=None, check_location='', user=None): |
|
2081 | 2079 | self.repo_group_name = group_name |
|
2082 | 2080 | return super(HasRepoGroupPermissionAll, self).__call__( |
|
2083 | 2081 | check_location, user) |
|
2084 | 2082 | |
|
2085 | 2083 | def check_permissions(self, user): |
|
2086 | 2084 | perms = user.permissions |
|
2087 | 2085 | try: |
|
2088 | 2086 | user_perms = {perms['repositories_groups'][self.repo_group_name]} |
|
2089 | 2087 | except KeyError: |
|
2090 | 2088 | return False |
|
2091 | 2089 | if self.required_perms.issubset(user_perms): |
|
2092 | 2090 | return True |
|
2093 | 2091 | return False |
|
2094 | 2092 | |
|
2095 | 2093 | |
|
2096 | 2094 | class HasUserGroupPermissionAny(PermsFunction): |
|
2097 | 2095 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
2098 | 2096 | self.user_group_name = user_group_name |
|
2099 | 2097 | return super(HasUserGroupPermissionAny, self).__call__( |
|
2100 | 2098 | check_location, user) |
|
2101 | 2099 | |
|
2102 | 2100 | def check_permissions(self, user): |
|
2103 | 2101 | perms = user.permissions |
|
2104 | 2102 | try: |
|
2105 | 2103 | user_perms = {perms['user_groups'][self.user_group_name]} |
|
2106 | 2104 | except KeyError: |
|
2107 | 2105 | return False |
|
2108 | 2106 | if self.required_perms.intersection(user_perms): |
|
2109 | 2107 | return True |
|
2110 | 2108 | return False |
|
2111 | 2109 | |
|
2112 | 2110 | |
|
2113 | 2111 | class HasUserGroupPermissionAll(PermsFunction): |
|
2114 | 2112 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
2115 | 2113 | self.user_group_name = user_group_name |
|
2116 | 2114 | return super(HasUserGroupPermissionAll, self).__call__( |
|
2117 | 2115 | check_location, user) |
|
2118 | 2116 | |
|
2119 | 2117 | def check_permissions(self, user): |
|
2120 | 2118 | perms = user.permissions |
|
2121 | 2119 | try: |
|
2122 | 2120 | user_perms = {perms['user_groups'][self.user_group_name]} |
|
2123 | 2121 | except KeyError: |
|
2124 | 2122 | return False |
|
2125 | 2123 | if self.required_perms.issubset(user_perms): |
|
2126 | 2124 | return True |
|
2127 | 2125 | return False |
|
2128 | 2126 | |
|
2129 | 2127 | |
|
2130 | 2128 | # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH |
|
2131 | 2129 | class HasPermissionAnyMiddleware(object): |
|
2132 | 2130 | def __init__(self, *perms): |
|
2133 | 2131 | self.required_perms = set(perms) |
|
2134 | 2132 | |
|
2135 | 2133 | def __call__(self, auth_user, repo_name): |
|
2136 | 2134 | # repo_name MUST be unicode, since we handle keys in permission |
|
2137 | 2135 | # dict by unicode |
|
2138 | 2136 | repo_name = safe_unicode(repo_name) |
|
2139 | 2137 | log.debug( |
|
2140 | 2138 | 'Checking VCS protocol permissions %s for user:%s repo:`%s`', |
|
2141 | 2139 | self.required_perms, auth_user, repo_name) |
|
2142 | 2140 | |
|
2143 | 2141 | if self.check_permissions(auth_user, repo_name): |
|
2144 | 2142 | log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s', |
|
2145 | 2143 | repo_name, auth_user, 'PermissionMiddleware') |
|
2146 | 2144 | return True |
|
2147 | 2145 | |
|
2148 | 2146 | else: |
|
2149 | 2147 | log.debug('Permission to repo:`%s` DENIED for user:%s @ %s', |
|
2150 | 2148 | repo_name, auth_user, 'PermissionMiddleware') |
|
2151 | 2149 | return False |
|
2152 | 2150 | |
|
2153 | 2151 | def check_permissions(self, user, repo_name): |
|
2154 | 2152 | perms = user.permissions_with_scope({'repo_name': repo_name}) |
|
2155 | 2153 | |
|
2156 | 2154 | try: |
|
2157 | 2155 | user_perms = {perms['repositories'][repo_name]} |
|
2158 | 2156 | except Exception: |
|
2159 | 2157 | log.exception('Error while accessing user permissions') |
|
2160 | 2158 | return False |
|
2161 | 2159 | |
|
2162 | 2160 | if self.required_perms.intersection(user_perms): |
|
2163 | 2161 | return True |
|
2164 | 2162 | return False |
|
2165 | 2163 | |
|
2166 | 2164 | |
|
2167 | 2165 | # SPECIAL VERSION TO HANDLE API AUTH |
|
2168 | 2166 | class _BaseApiPerm(object): |
|
2169 | 2167 | def __init__(self, *perms): |
|
2170 | 2168 | self.required_perms = set(perms) |
|
2171 | 2169 | |
|
2172 | 2170 | def __call__(self, check_location=None, user=None, repo_name=None, |
|
2173 | 2171 | group_name=None, user_group_name=None): |
|
2174 | 2172 | cls_name = self.__class__.__name__ |
|
2175 | 2173 | check_scope = 'global:%s' % (self.required_perms,) |
|
2176 | 2174 | if repo_name: |
|
2177 | 2175 | check_scope += ', repo_name:%s' % (repo_name,) |
|
2178 | 2176 | |
|
2179 | 2177 | if group_name: |
|
2180 | 2178 | check_scope += ', repo_group_name:%s' % (group_name,) |
|
2181 | 2179 | |
|
2182 | 2180 | if user_group_name: |
|
2183 | 2181 | check_scope += ', user_group_name:%s' % (user_group_name,) |
|
2184 | 2182 | |
|
2185 | log.debug( | |
|
2186 | 'checking cls:%s %s %s @ %s' | |
|
2187 | % (cls_name, self.required_perms, check_scope, check_location)) | |
|
2183 | log.debug('checking cls:%s %s %s @ %s', | |
|
2184 | cls_name, self.required_perms, check_scope, check_location) | |
|
2188 | 2185 | if not user: |
|
2189 | 2186 | log.debug('Empty User passed into arguments') |
|
2190 | 2187 | return False |
|
2191 | 2188 | |
|
2192 | 2189 | # process user |
|
2193 | 2190 | if not isinstance(user, AuthUser): |
|
2194 | 2191 | user = AuthUser(user.user_id) |
|
2195 | 2192 | if not check_location: |
|
2196 | 2193 | check_location = 'unspecified' |
|
2197 | 2194 | if self.check_permissions(user.permissions, repo_name, group_name, |
|
2198 | 2195 | user_group_name): |
|
2199 | 2196 | log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s', |
|
2200 | 2197 | check_scope, user, check_location) |
|
2201 | 2198 | return True |
|
2202 | 2199 | |
|
2203 | 2200 | else: |
|
2204 | 2201 | log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s', |
|
2205 | 2202 | check_scope, user, check_location) |
|
2206 | 2203 | return False |
|
2207 | 2204 | |
|
2208 | 2205 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2209 | 2206 | user_group_name=None): |
|
2210 | 2207 | """ |
|
2211 | 2208 | implement in child class should return True if permissions are ok, |
|
2212 | 2209 | False otherwise |
|
2213 | 2210 | |
|
2214 | 2211 | :param perm_defs: dict with permission definitions |
|
2215 | 2212 | :param repo_name: repo name |
|
2216 | 2213 | """ |
|
2217 | 2214 | raise NotImplementedError() |
|
2218 | 2215 | |
|
2219 | 2216 | |
|
2220 | 2217 | class HasPermissionAllApi(_BaseApiPerm): |
|
2221 | 2218 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2222 | 2219 | user_group_name=None): |
|
2223 | 2220 | if self.required_perms.issubset(perm_defs.get('global')): |
|
2224 | 2221 | return True |
|
2225 | 2222 | return False |
|
2226 | 2223 | |
|
2227 | 2224 | |
|
2228 | 2225 | class HasPermissionAnyApi(_BaseApiPerm): |
|
2229 | 2226 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2230 | 2227 | user_group_name=None): |
|
2231 | 2228 | if self.required_perms.intersection(perm_defs.get('global')): |
|
2232 | 2229 | return True |
|
2233 | 2230 | return False |
|
2234 | 2231 | |
|
2235 | 2232 | |
|
2236 | 2233 | class HasRepoPermissionAllApi(_BaseApiPerm): |
|
2237 | 2234 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2238 | 2235 | user_group_name=None): |
|
2239 | 2236 | try: |
|
2240 | 2237 | _user_perms = {perm_defs['repositories'][repo_name]} |
|
2241 | 2238 | except KeyError: |
|
2242 | 2239 | log.warning(traceback.format_exc()) |
|
2243 | 2240 | return False |
|
2244 | 2241 | if self.required_perms.issubset(_user_perms): |
|
2245 | 2242 | return True |
|
2246 | 2243 | return False |
|
2247 | 2244 | |
|
2248 | 2245 | |
|
2249 | 2246 | class HasRepoPermissionAnyApi(_BaseApiPerm): |
|
2250 | 2247 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2251 | 2248 | user_group_name=None): |
|
2252 | 2249 | try: |
|
2253 | 2250 | _user_perms = {perm_defs['repositories'][repo_name]} |
|
2254 | 2251 | except KeyError: |
|
2255 | 2252 | log.warning(traceback.format_exc()) |
|
2256 | 2253 | return False |
|
2257 | 2254 | if self.required_perms.intersection(_user_perms): |
|
2258 | 2255 | return True |
|
2259 | 2256 | return False |
|
2260 | 2257 | |
|
2261 | 2258 | |
|
2262 | 2259 | class HasRepoGroupPermissionAnyApi(_BaseApiPerm): |
|
2263 | 2260 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2264 | 2261 | user_group_name=None): |
|
2265 | 2262 | try: |
|
2266 | 2263 | _user_perms = {perm_defs['repositories_groups'][group_name]} |
|
2267 | 2264 | except KeyError: |
|
2268 | 2265 | log.warning(traceback.format_exc()) |
|
2269 | 2266 | return False |
|
2270 | 2267 | if self.required_perms.intersection(_user_perms): |
|
2271 | 2268 | return True |
|
2272 | 2269 | return False |
|
2273 | 2270 | |
|
2274 | 2271 | |
|
2275 | 2272 | class HasRepoGroupPermissionAllApi(_BaseApiPerm): |
|
2276 | 2273 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2277 | 2274 | user_group_name=None): |
|
2278 | 2275 | try: |
|
2279 | 2276 | _user_perms = {perm_defs['repositories_groups'][group_name]} |
|
2280 | 2277 | except KeyError: |
|
2281 | 2278 | log.warning(traceback.format_exc()) |
|
2282 | 2279 | return False |
|
2283 | 2280 | if self.required_perms.issubset(_user_perms): |
|
2284 | 2281 | return True |
|
2285 | 2282 | return False |
|
2286 | 2283 | |
|
2287 | 2284 | |
|
2288 | 2285 | class HasUserGroupPermissionAnyApi(_BaseApiPerm): |
|
2289 | 2286 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
2290 | 2287 | user_group_name=None): |
|
2291 | 2288 | try: |
|
2292 | 2289 | _user_perms = {perm_defs['user_groups'][user_group_name]} |
|
2293 | 2290 | except KeyError: |
|
2294 | 2291 | log.warning(traceback.format_exc()) |
|
2295 | 2292 | return False |
|
2296 | 2293 | if self.required_perms.intersection(_user_perms): |
|
2297 | 2294 | return True |
|
2298 | 2295 | return False |
|
2299 | 2296 | |
|
2300 | 2297 | |
|
2301 | 2298 | def check_ip_access(source_ip, allowed_ips=None): |
|
2302 | 2299 | """ |
|
2303 | 2300 | Checks if source_ip is a subnet of any of allowed_ips. |
|
2304 | 2301 | |
|
2305 | 2302 | :param source_ip: |
|
2306 | 2303 | :param allowed_ips: list of allowed ips together with mask |
|
2307 | 2304 | """ |
|
2308 |
log.debug('checking if ip:%s is subnet of %s' |
|
|
2305 | log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips) | |
|
2309 | 2306 | source_ip_address = ipaddress.ip_address(safe_unicode(source_ip)) |
|
2310 | 2307 | if isinstance(allowed_ips, (tuple, list, set)): |
|
2311 | 2308 | for ip in allowed_ips: |
|
2312 | 2309 | ip = safe_unicode(ip) |
|
2313 | 2310 | try: |
|
2314 | 2311 | network_address = ipaddress.ip_network(ip, strict=False) |
|
2315 | 2312 | if source_ip_address in network_address: |
|
2316 |
log.debug('IP %s is network %s' |
|
|
2317 | (source_ip_address, network_address)) | |
|
2313 | log.debug('IP %s is network %s', source_ip_address, network_address) | |
|
2318 | 2314 | return True |
|
2319 | 2315 | # for any case we cannot determine the IP, don't crash just |
|
2320 | 2316 | # skip it and log as error, we want to say forbidden still when |
|
2321 | 2317 | # sending bad IP |
|
2322 | 2318 | except Exception: |
|
2323 | 2319 | log.error(traceback.format_exc()) |
|
2324 | 2320 | continue |
|
2325 | 2321 | return False |
|
2326 | 2322 | |
|
2327 | 2323 | |
|
2328 | 2324 | def get_cython_compat_decorator(wrapper, func): |
|
2329 | 2325 | """ |
|
2330 | 2326 | Creates a cython compatible decorator. The previously used |
|
2331 | 2327 | decorator.decorator() function seems to be incompatible with cython. |
|
2332 | 2328 | |
|
2333 | 2329 | :param wrapper: __wrapper method of the decorator class |
|
2334 | 2330 | :param func: decorated function |
|
2335 | 2331 | """ |
|
2336 | 2332 | @wraps(func) |
|
2337 | 2333 | def local_wrapper(*args, **kwds): |
|
2338 | 2334 | return wrapper(func, *args, **kwds) |
|
2339 | 2335 | local_wrapper.__wrapped__ = func |
|
2340 | 2336 | return local_wrapper |
|
2341 | 2337 | |
|
2342 | 2338 |
@@ -1,550 +1,550 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | The base Controller API |
|
23 | 23 | Provides the BaseController class for subclassing. And usage in different |
|
24 | 24 | controllers |
|
25 | 25 | """ |
|
26 | 26 | |
|
27 | 27 | import logging |
|
28 | 28 | import socket |
|
29 | 29 | |
|
30 | 30 | import markupsafe |
|
31 | 31 | import ipaddress |
|
32 | 32 | |
|
33 | 33 | from paste.auth.basic import AuthBasicAuthenticator |
|
34 | 34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
35 | 35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
36 | 36 | |
|
37 | 37 | import rhodecode |
|
38 | 38 | from rhodecode.authentication.base import VCS_TYPE |
|
39 | 39 | from rhodecode.lib import auth, utils2 |
|
40 | 40 | from rhodecode.lib import helpers as h |
|
41 | 41 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
42 | 42 | from rhodecode.lib.exceptions import UserCreationError |
|
43 | 43 | from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes) |
|
44 | 44 | from rhodecode.lib.utils2 import ( |
|
45 | 45 | str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str) |
|
46 | 46 | from rhodecode.model.db import Repository, User, ChangesetComment |
|
47 | 47 | from rhodecode.model.notification import NotificationModel |
|
48 | 48 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def _filter_proxy(ip): |
|
54 | 54 | """ |
|
55 | 55 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
56 | 56 | ips. Those comma separated IPs are passed from various proxies in the |
|
57 | 57 | chain of request processing. The left-most being the original client. |
|
58 | 58 | We only care about the first IP which came from the org. client. |
|
59 | 59 | |
|
60 | 60 | :param ip: ip string from headers |
|
61 | 61 | """ |
|
62 | 62 | if ',' in ip: |
|
63 | 63 | _ips = ip.split(',') |
|
64 | 64 | _first_ip = _ips[0].strip() |
|
65 | 65 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
66 | 66 | return _first_ip |
|
67 | 67 | return ip |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | def _filter_port(ip): |
|
71 | 71 | """ |
|
72 | 72 | Removes a port from ip, there are 4 main cases to handle here. |
|
73 | 73 | - ipv4 eg. 127.0.0.1 |
|
74 | 74 | - ipv6 eg. ::1 |
|
75 | 75 | - ipv4+port eg. 127.0.0.1:8080 |
|
76 | 76 | - ipv6+port eg. [::1]:8080 |
|
77 | 77 | |
|
78 | 78 | :param ip: |
|
79 | 79 | """ |
|
80 | 80 | def is_ipv6(ip_addr): |
|
81 | 81 | if hasattr(socket, 'inet_pton'): |
|
82 | 82 | try: |
|
83 | 83 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
84 | 84 | except socket.error: |
|
85 | 85 | return False |
|
86 | 86 | else: |
|
87 | 87 | # fallback to ipaddress |
|
88 | 88 | try: |
|
89 | 89 | ipaddress.IPv6Address(safe_unicode(ip_addr)) |
|
90 | 90 | except Exception: |
|
91 | 91 | return False |
|
92 | 92 | return True |
|
93 | 93 | |
|
94 | 94 | if ':' not in ip: # must be ipv4 pure ip |
|
95 | 95 | return ip |
|
96 | 96 | |
|
97 | 97 | if '[' in ip and ']' in ip: # ipv6 with port |
|
98 | 98 | return ip.split(']')[0][1:].lower() |
|
99 | 99 | |
|
100 | 100 | # must be ipv6 or ipv4 with port |
|
101 | 101 | if is_ipv6(ip): |
|
102 | 102 | return ip |
|
103 | 103 | else: |
|
104 | 104 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
105 | 105 | return ip |
|
106 | 106 | |
|
107 | 107 | |
|
108 | 108 | def get_ip_addr(environ): |
|
109 | 109 | proxy_key = 'HTTP_X_REAL_IP' |
|
110 | 110 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
111 | 111 | def_key = 'REMOTE_ADDR' |
|
112 | 112 | _filters = lambda x: _filter_port(_filter_proxy(x)) |
|
113 | 113 | |
|
114 | 114 | ip = environ.get(proxy_key) |
|
115 | 115 | if ip: |
|
116 | 116 | return _filters(ip) |
|
117 | 117 | |
|
118 | 118 | ip = environ.get(proxy_key2) |
|
119 | 119 | if ip: |
|
120 | 120 | return _filters(ip) |
|
121 | 121 | |
|
122 | 122 | ip = environ.get(def_key, '0.0.0.0') |
|
123 | 123 | return _filters(ip) |
|
124 | 124 | |
|
125 | 125 | |
|
126 | 126 | def get_server_ip_addr(environ, log_errors=True): |
|
127 | 127 | hostname = environ.get('SERVER_NAME') |
|
128 | 128 | try: |
|
129 | 129 | return socket.gethostbyname(hostname) |
|
130 | 130 | except Exception as e: |
|
131 | 131 | if log_errors: |
|
132 | 132 | # in some cases this lookup is not possible, and we don't want to |
|
133 | 133 | # make it an exception in logs |
|
134 | 134 | log.exception('Could not retrieve server ip address: %s', e) |
|
135 | 135 | return hostname |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def get_server_port(environ): |
|
139 | 139 | return environ.get('SERVER_PORT') |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | def get_access_path(environ): |
|
143 | 143 | path = environ.get('PATH_INFO') |
|
144 | 144 | org_req = environ.get('pylons.original_request') |
|
145 | 145 | if org_req: |
|
146 | 146 | path = org_req.environ.get('PATH_INFO') |
|
147 | 147 | return path |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | def get_user_agent(environ): |
|
151 | 151 | return environ.get('HTTP_USER_AGENT') |
|
152 | 152 | |
|
153 | 153 | |
|
154 | 154 | def vcs_operation_context( |
|
155 | 155 | environ, repo_name, username, action, scm, check_locking=True, |
|
156 | 156 | is_shadow_repo=False, check_branch_perms=False, detect_force_push=False): |
|
157 | 157 | """ |
|
158 | 158 | Generate the context for a vcs operation, e.g. push or pull. |
|
159 | 159 | |
|
160 | 160 | This context is passed over the layers so that hooks triggered by the |
|
161 | 161 | vcs operation know details like the user, the user's IP address etc. |
|
162 | 162 | |
|
163 | 163 | :param check_locking: Allows to switch of the computation of the locking |
|
164 | 164 | data. This serves mainly the need of the simplevcs middleware to be |
|
165 | 165 | able to disable this for certain operations. |
|
166 | 166 | |
|
167 | 167 | """ |
|
168 | 168 | # Tri-state value: False: unlock, None: nothing, True: lock |
|
169 | 169 | make_lock = None |
|
170 | 170 | locked_by = [None, None, None] |
|
171 | 171 | is_anonymous = username == User.DEFAULT_USER |
|
172 | 172 | user = User.get_by_username(username) |
|
173 | 173 | if not is_anonymous and check_locking: |
|
174 | 174 | log.debug('Checking locking on repository "%s"', repo_name) |
|
175 | 175 | repo = Repository.get_by_repo_name(repo_name) |
|
176 | 176 | make_lock, __, locked_by = repo.get_locking_state( |
|
177 | 177 | action, user.user_id) |
|
178 | 178 | user_id = user.user_id |
|
179 | 179 | settings_model = VcsSettingsModel(repo=repo_name) |
|
180 | 180 | ui_settings = settings_model.get_ui_settings() |
|
181 | 181 | |
|
182 | 182 | # NOTE(marcink): This should be also in sync with |
|
183 | 183 | # rhodecode/apps/ssh_support/lib/backends/base.py:update_enviroment scm_data |
|
184 | 184 | scm_data = { |
|
185 | 185 | 'ip': get_ip_addr(environ), |
|
186 | 186 | 'username': username, |
|
187 | 187 | 'user_id': user_id, |
|
188 | 188 | 'action': action, |
|
189 | 189 | 'repository': repo_name, |
|
190 | 190 | 'scm': scm, |
|
191 | 191 | 'config': rhodecode.CONFIG['__file__'], |
|
192 | 192 | 'make_lock': make_lock, |
|
193 | 193 | 'locked_by': locked_by, |
|
194 | 194 | 'server_url': utils2.get_server_url(environ), |
|
195 | 195 | 'user_agent': get_user_agent(environ), |
|
196 | 196 | 'hooks': get_enabled_hook_classes(ui_settings), |
|
197 | 197 | 'is_shadow_repo': is_shadow_repo, |
|
198 | 198 | 'detect_force_push': detect_force_push, |
|
199 | 199 | 'check_branch_perms': check_branch_perms, |
|
200 | 200 | } |
|
201 | 201 | return scm_data |
|
202 | 202 | |
|
203 | 203 | |
|
204 | 204 | class BasicAuth(AuthBasicAuthenticator): |
|
205 | 205 | |
|
206 | 206 | def __init__(self, realm, authfunc, registry, auth_http_code=None, |
|
207 | 207 | initial_call_detection=False, acl_repo_name=None): |
|
208 | 208 | self.realm = realm |
|
209 | 209 | self.initial_call = initial_call_detection |
|
210 | 210 | self.authfunc = authfunc |
|
211 | 211 | self.registry = registry |
|
212 | 212 | self.acl_repo_name = acl_repo_name |
|
213 | 213 | self._rc_auth_http_code = auth_http_code |
|
214 | 214 | |
|
215 | 215 | def _get_response_from_code(self, http_code): |
|
216 | 216 | try: |
|
217 | 217 | return get_exception(safe_int(http_code)) |
|
218 | 218 | except Exception: |
|
219 |
log.exception('Failed to fetch response for code %s' |
|
|
219 | log.exception('Failed to fetch response for code %s', http_code) | |
|
220 | 220 | return HTTPForbidden |
|
221 | 221 | |
|
222 | 222 | def get_rc_realm(self): |
|
223 | 223 | return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm')) |
|
224 | 224 | |
|
225 | 225 | def build_authentication(self): |
|
226 | 226 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
227 | 227 | if self._rc_auth_http_code and not self.initial_call: |
|
228 | 228 | # return alternative HTTP code if alternative http return code |
|
229 | 229 | # is specified in RhodeCode config, but ONLY if it's not the |
|
230 | 230 | # FIRST call |
|
231 | 231 | custom_response_klass = self._get_response_from_code( |
|
232 | 232 | self._rc_auth_http_code) |
|
233 | 233 | return custom_response_klass(headers=head) |
|
234 | 234 | return HTTPUnauthorized(headers=head) |
|
235 | 235 | |
|
236 | 236 | def authenticate(self, environ): |
|
237 | 237 | authorization = AUTHORIZATION(environ) |
|
238 | 238 | if not authorization: |
|
239 | 239 | return self.build_authentication() |
|
240 | 240 | (authmeth, auth) = authorization.split(' ', 1) |
|
241 | 241 | if 'basic' != authmeth.lower(): |
|
242 | 242 | return self.build_authentication() |
|
243 | 243 | auth = auth.strip().decode('base64') |
|
244 | 244 | _parts = auth.split(':', 1) |
|
245 | 245 | if len(_parts) == 2: |
|
246 | 246 | username, password = _parts |
|
247 | 247 | auth_data = self.authfunc( |
|
248 | 248 | username, password, environ, VCS_TYPE, |
|
249 | 249 | registry=self.registry, acl_repo_name=self.acl_repo_name) |
|
250 | 250 | if auth_data: |
|
251 | 251 | return {'username': username, 'auth_data': auth_data} |
|
252 | 252 | if username and password: |
|
253 | 253 | # we mark that we actually executed authentication once, at |
|
254 | 254 | # that point we can use the alternative auth code |
|
255 | 255 | self.initial_call = False |
|
256 | 256 | |
|
257 | 257 | return self.build_authentication() |
|
258 | 258 | |
|
259 | 259 | __call__ = authenticate |
|
260 | 260 | |
|
261 | 261 | |
|
262 | 262 | def calculate_version_hash(config): |
|
263 | 263 | return sha1( |
|
264 | 264 | config.get('beaker.session.secret', '') + |
|
265 | 265 | rhodecode.__version__)[:8] |
|
266 | 266 | |
|
267 | 267 | |
|
268 | 268 | def get_current_lang(request): |
|
269 | 269 | # NOTE(marcink): remove after pyramid move |
|
270 | 270 | try: |
|
271 | 271 | return translation.get_lang()[0] |
|
272 | 272 | except: |
|
273 | 273 | pass |
|
274 | 274 | |
|
275 | 275 | return getattr(request, '_LOCALE_', request.locale_name) |
|
276 | 276 | |
|
277 | 277 | |
|
278 | 278 | def attach_context_attributes(context, request, user_id): |
|
279 | 279 | """ |
|
280 | 280 | Attach variables into template context called `c`. |
|
281 | 281 | """ |
|
282 | 282 | config = request.registry.settings |
|
283 | 283 | |
|
284 | 284 | |
|
285 | 285 | rc_config = SettingsModel().get_all_settings(cache=True) |
|
286 | 286 | |
|
287 | 287 | context.rhodecode_version = rhodecode.__version__ |
|
288 | 288 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
289 | 289 | # unique secret + version does not leak the version but keep consistency |
|
290 | 290 | context.rhodecode_version_hash = calculate_version_hash(config) |
|
291 | 291 | |
|
292 | 292 | # Default language set for the incoming request |
|
293 | 293 | context.language = get_current_lang(request) |
|
294 | 294 | |
|
295 | 295 | # Visual options |
|
296 | 296 | context.visual = AttributeDict({}) |
|
297 | 297 | |
|
298 | 298 | # DB stored Visual Items |
|
299 | 299 | context.visual.show_public_icon = str2bool( |
|
300 | 300 | rc_config.get('rhodecode_show_public_icon')) |
|
301 | 301 | context.visual.show_private_icon = str2bool( |
|
302 | 302 | rc_config.get('rhodecode_show_private_icon')) |
|
303 | 303 | context.visual.stylify_metatags = str2bool( |
|
304 | 304 | rc_config.get('rhodecode_stylify_metatags')) |
|
305 | 305 | context.visual.dashboard_items = safe_int( |
|
306 | 306 | rc_config.get('rhodecode_dashboard_items', 100)) |
|
307 | 307 | context.visual.admin_grid_items = safe_int( |
|
308 | 308 | rc_config.get('rhodecode_admin_grid_items', 100)) |
|
309 | 309 | context.visual.repository_fields = str2bool( |
|
310 | 310 | rc_config.get('rhodecode_repository_fields')) |
|
311 | 311 | context.visual.show_version = str2bool( |
|
312 | 312 | rc_config.get('rhodecode_show_version')) |
|
313 | 313 | context.visual.use_gravatar = str2bool( |
|
314 | 314 | rc_config.get('rhodecode_use_gravatar')) |
|
315 | 315 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') |
|
316 | 316 | context.visual.default_renderer = rc_config.get( |
|
317 | 317 | 'rhodecode_markup_renderer', 'rst') |
|
318 | 318 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES |
|
319 | 319 | context.visual.rhodecode_support_url = \ |
|
320 | 320 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') |
|
321 | 321 | |
|
322 | 322 | context.visual.affected_files_cut_off = 60 |
|
323 | 323 | |
|
324 | 324 | context.pre_code = rc_config.get('rhodecode_pre_code') |
|
325 | 325 | context.post_code = rc_config.get('rhodecode_post_code') |
|
326 | 326 | context.rhodecode_name = rc_config.get('rhodecode_title') |
|
327 | 327 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') |
|
328 | 328 | # if we have specified default_encoding in the request, it has more |
|
329 | 329 | # priority |
|
330 | 330 | if request.GET.get('default_encoding'): |
|
331 | 331 | context.default_encodings.insert(0, request.GET.get('default_encoding')) |
|
332 | 332 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') |
|
333 | 333 | context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl') |
|
334 | 334 | |
|
335 | 335 | # INI stored |
|
336 | 336 | context.labs_active = str2bool( |
|
337 | 337 | config.get('labs_settings_active', 'false')) |
|
338 | 338 | context.ssh_enabled = str2bool( |
|
339 | 339 | config.get('ssh.generate_authorized_keyfile', 'false')) |
|
340 | 340 | |
|
341 | 341 | context.visual.allow_repo_location_change = str2bool( |
|
342 | 342 | config.get('allow_repo_location_change', True)) |
|
343 | 343 | context.visual.allow_custom_hooks_settings = str2bool( |
|
344 | 344 | config.get('allow_custom_hooks_settings', True)) |
|
345 | 345 | context.debug_style = str2bool(config.get('debug_style', False)) |
|
346 | 346 | |
|
347 | 347 | context.rhodecode_instanceid = config.get('instance_id') |
|
348 | 348 | |
|
349 | 349 | context.visual.cut_off_limit_diff = safe_int( |
|
350 | 350 | config.get('cut_off_limit_diff')) |
|
351 | 351 | context.visual.cut_off_limit_file = safe_int( |
|
352 | 352 | config.get('cut_off_limit_file')) |
|
353 | 353 | |
|
354 | 354 | # AppEnlight |
|
355 | 355 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) |
|
356 | 356 | context.appenlight_api_public_key = config.get( |
|
357 | 357 | 'appenlight.api_public_key', '') |
|
358 | 358 | context.appenlight_server_url = config.get('appenlight.server_url', '') |
|
359 | 359 | |
|
360 | 360 | # JS template context |
|
361 | 361 | context.template_context = { |
|
362 | 362 | 'repo_name': None, |
|
363 | 363 | 'repo_type': None, |
|
364 | 364 | 'repo_landing_commit': None, |
|
365 | 365 | 'rhodecode_user': { |
|
366 | 366 | 'username': None, |
|
367 | 367 | 'email': None, |
|
368 | 368 | 'notification_status': False |
|
369 | 369 | }, |
|
370 | 370 | 'visual': { |
|
371 | 371 | 'default_renderer': None |
|
372 | 372 | }, |
|
373 | 373 | 'commit_data': { |
|
374 | 374 | 'commit_id': None |
|
375 | 375 | }, |
|
376 | 376 | 'pull_request_data': {'pull_request_id': None}, |
|
377 | 377 | 'timeago': { |
|
378 | 378 | 'refresh_time': 120 * 1000, |
|
379 | 379 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 |
|
380 | 380 | }, |
|
381 | 381 | 'pyramid_dispatch': { |
|
382 | 382 | |
|
383 | 383 | }, |
|
384 | 384 | 'extra': {'plugins': {}} |
|
385 | 385 | } |
|
386 | 386 | # END CONFIG VARS |
|
387 | 387 | |
|
388 | 388 | diffmode = 'sideside' |
|
389 | 389 | if request.GET.get('diffmode'): |
|
390 | 390 | if request.GET['diffmode'] == 'unified': |
|
391 | 391 | diffmode = 'unified' |
|
392 | 392 | elif request.session.get('diffmode'): |
|
393 | 393 | diffmode = request.session['diffmode'] |
|
394 | 394 | |
|
395 | 395 | context.diffmode = diffmode |
|
396 | 396 | |
|
397 | 397 | if request.session.get('diffmode') != diffmode: |
|
398 | 398 | request.session['diffmode'] = diffmode |
|
399 | 399 | |
|
400 | 400 | context.csrf_token = auth.get_csrf_token(session=request.session) |
|
401 | 401 | context.backends = rhodecode.BACKENDS.keys() |
|
402 | 402 | context.backends.sort() |
|
403 | 403 | context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id) |
|
404 | 404 | |
|
405 | 405 | # web case |
|
406 | 406 | if hasattr(request, 'user'): |
|
407 | 407 | context.auth_user = request.user |
|
408 | 408 | context.rhodecode_user = request.user |
|
409 | 409 | |
|
410 | 410 | # api case |
|
411 | 411 | if hasattr(request, 'rpc_user'): |
|
412 | 412 | context.auth_user = request.rpc_user |
|
413 | 413 | context.rhodecode_user = request.rpc_user |
|
414 | 414 | |
|
415 | 415 | # attach the whole call context to the request |
|
416 | 416 | request.call_context = context |
|
417 | 417 | |
|
418 | 418 | |
|
419 | 419 | def get_auth_user(request): |
|
420 | 420 | environ = request.environ |
|
421 | 421 | session = request.session |
|
422 | 422 | |
|
423 | 423 | ip_addr = get_ip_addr(environ) |
|
424 | 424 | # make sure that we update permissions each time we call controller |
|
425 | 425 | _auth_token = (request.GET.get('auth_token', '') or |
|
426 | 426 | request.GET.get('api_key', '')) |
|
427 | 427 | |
|
428 | 428 | if _auth_token: |
|
429 | 429 | # when using API_KEY we assume user exists, and |
|
430 | 430 | # doesn't need auth based on cookies. |
|
431 | 431 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) |
|
432 | 432 | authenticated = False |
|
433 | 433 | else: |
|
434 | 434 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
435 | 435 | try: |
|
436 | 436 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), |
|
437 | 437 | ip_addr=ip_addr) |
|
438 | 438 | except UserCreationError as e: |
|
439 | 439 | h.flash(e, 'error') |
|
440 | 440 | # container auth or other auth functions that create users |
|
441 | 441 | # on the fly can throw this exception signaling that there's |
|
442 | 442 | # issue with user creation, explanation should be provided |
|
443 | 443 | # in Exception itself. We then create a simple blank |
|
444 | 444 | # AuthUser |
|
445 | 445 | auth_user = AuthUser(ip_addr=ip_addr) |
|
446 | 446 | |
|
447 | 447 | # in case someone changes a password for user it triggers session |
|
448 | 448 | # flush and forces a re-login |
|
449 | 449 | if password_changed(auth_user, session): |
|
450 | 450 | session.invalidate() |
|
451 | 451 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
452 | 452 | auth_user = AuthUser(ip_addr=ip_addr) |
|
453 | 453 | |
|
454 | 454 | authenticated = cookie_store.get('is_authenticated') |
|
455 | 455 | |
|
456 | 456 | if not auth_user.is_authenticated and auth_user.is_user_object: |
|
457 | 457 | # user is not authenticated and not empty |
|
458 | 458 | auth_user.set_authenticated(authenticated) |
|
459 | 459 | |
|
460 | 460 | return auth_user |
|
461 | 461 | |
|
462 | 462 | |
|
463 | 463 | def h_filter(s): |
|
464 | 464 | """ |
|
465 | 465 | Custom filter for Mako templates. Mako by standard uses `markupsafe.escape` |
|
466 | 466 | we wrap this with additional functionality that converts None to empty |
|
467 | 467 | strings |
|
468 | 468 | """ |
|
469 | 469 | if s is None: |
|
470 | 470 | return markupsafe.Markup() |
|
471 | 471 | return markupsafe.escape(s) |
|
472 | 472 | |
|
473 | 473 | |
|
474 | 474 | def add_events_routes(config): |
|
475 | 475 | """ |
|
476 | 476 | Adds routing that can be used in events. Because some events are triggered |
|
477 | 477 | outside of pyramid context, we need to bootstrap request with some |
|
478 | 478 | routing registered |
|
479 | 479 | """ |
|
480 | 480 | |
|
481 | 481 | from rhodecode.apps._base import ADMIN_PREFIX |
|
482 | 482 | |
|
483 | 483 | config.add_route(name='home', pattern='/') |
|
484 | 484 | |
|
485 | 485 | config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') |
|
486 | 486 | config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') |
|
487 | 487 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
488 | 488 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') |
|
489 | 489 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
490 | 490 | |
|
491 | 491 | config.add_route(name='pullrequest_show', |
|
492 | 492 | pattern='/{repo_name}/pull-request/{pull_request_id}') |
|
493 | 493 | config.add_route(name='pull_requests_global', |
|
494 | 494 | pattern='/pull-request/{pull_request_id}') |
|
495 | 495 | config.add_route(name='repo_commit', |
|
496 | 496 | pattern='/{repo_name}/changeset/{commit_id}') |
|
497 | 497 | |
|
498 | 498 | config.add_route(name='repo_files', |
|
499 | 499 | pattern='/{repo_name}/files/{commit_id}/{f_path}') |
|
500 | 500 | |
|
501 | 501 | |
|
502 | 502 | def bootstrap_config(request): |
|
503 | 503 | import pyramid.testing |
|
504 | 504 | registry = pyramid.testing.Registry('RcTestRegistry') |
|
505 | 505 | |
|
506 | 506 | config = pyramid.testing.setUp(registry=registry, request=request) |
|
507 | 507 | |
|
508 | 508 | # allow pyramid lookup in testing |
|
509 | 509 | config.include('pyramid_mako') |
|
510 | 510 | config.include('pyramid_beaker') |
|
511 | 511 | config.include('rhodecode.lib.rc_cache') |
|
512 | 512 | |
|
513 | 513 | add_events_routes(config) |
|
514 | 514 | |
|
515 | 515 | return config |
|
516 | 516 | |
|
517 | 517 | |
|
518 | 518 | def bootstrap_request(**kwargs): |
|
519 | 519 | import pyramid.testing |
|
520 | 520 | |
|
521 | 521 | class TestRequest(pyramid.testing.DummyRequest): |
|
522 | 522 | application_url = kwargs.pop('application_url', 'http://example.com') |
|
523 | 523 | host = kwargs.pop('host', 'example.com:80') |
|
524 | 524 | domain = kwargs.pop('domain', 'example.com') |
|
525 | 525 | |
|
526 | 526 | def translate(self, msg): |
|
527 | 527 | return msg |
|
528 | 528 | |
|
529 | 529 | def plularize(self, singular, plural, n): |
|
530 | 530 | return singular |
|
531 | 531 | |
|
532 | 532 | def get_partial_renderer(self, tmpl_name): |
|
533 | 533 | |
|
534 | 534 | from rhodecode.lib.partial_renderer import get_partial_renderer |
|
535 | 535 | return get_partial_renderer(request=self, tmpl_name=tmpl_name) |
|
536 | 536 | |
|
537 | 537 | _call_context = {} |
|
538 | 538 | @property |
|
539 | 539 | def call_context(self): |
|
540 | 540 | return self._call_context |
|
541 | 541 | |
|
542 | 542 | class TestDummySession(pyramid.testing.DummySession): |
|
543 | 543 | def save(*arg, **kw): |
|
544 | 544 | pass |
|
545 | 545 | |
|
546 | 546 | request = TestRequest(**kwargs) |
|
547 | 547 | request.session = TestDummySession() |
|
548 | 548 | |
|
549 | 549 | return request |
|
550 | 550 |
@@ -1,745 +1,745 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import difflib |
|
23 | 23 | from itertools import groupby |
|
24 | 24 | |
|
25 | 25 | from pygments import lex |
|
26 | 26 | from pygments.formatters.html import _get_ttype_class as pygment_token_class |
|
27 | 27 | from pygments.lexers.special import TextLexer, Token |
|
28 | 28 | |
|
29 | 29 | from rhodecode.lib.helpers import ( |
|
30 | 30 | get_lexer_for_filenode, html_escape, get_custom_lexer) |
|
31 | 31 | from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict |
|
32 | 32 | from rhodecode.lib.vcs.nodes import FileNode |
|
33 | 33 | from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError |
|
34 | 34 | from rhodecode.lib.diff_match_patch import diff_match_patch |
|
35 | 35 | from rhodecode.lib.diffs import LimitedDiffContainer |
|
36 | 36 | from pygments.lexers import get_lexer_by_name |
|
37 | 37 | |
|
38 | 38 | plain_text_lexer = get_lexer_by_name( |
|
39 | 39 | 'text', stripall=False, stripnl=False, ensurenl=False) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def filenode_as_lines_tokens(filenode, lexer=None): |
|
46 | 46 | org_lexer = lexer |
|
47 | 47 | lexer = lexer or get_lexer_for_filenode(filenode) |
|
48 | 48 | log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s', |
|
49 | 49 | lexer, filenode, org_lexer) |
|
50 | 50 | tokens = tokenize_string(filenode.content, lexer) |
|
51 | 51 | lines = split_token_stream(tokens) |
|
52 | 52 | rv = list(lines) |
|
53 | 53 | return rv |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | def tokenize_string(content, lexer): |
|
57 | 57 | """ |
|
58 | 58 | Use pygments to tokenize some content based on a lexer |
|
59 | 59 | ensuring all original new lines and whitespace is preserved |
|
60 | 60 | """ |
|
61 | 61 | |
|
62 | 62 | lexer.stripall = False |
|
63 | 63 | lexer.stripnl = False |
|
64 | 64 | lexer.ensurenl = False |
|
65 | 65 | |
|
66 | 66 | if isinstance(lexer, TextLexer): |
|
67 | 67 | lexed = [(Token.Text, content)] |
|
68 | 68 | else: |
|
69 | 69 | lexed = lex(content, lexer) |
|
70 | 70 | |
|
71 | 71 | for token_type, token_text in lexed: |
|
72 | 72 | yield pygment_token_class(token_type), token_text |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def split_token_stream(tokens): |
|
76 | 76 | """ |
|
77 | 77 | Take a list of (TokenType, text) tuples and split them by a string |
|
78 | 78 | |
|
79 | 79 | split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')]) |
|
80 | 80 | [(TEXT, 'some'), (TEXT, 'text'), |
|
81 | 81 | (TEXT, 'more'), (TEXT, 'text')] |
|
82 | 82 | """ |
|
83 | 83 | |
|
84 | 84 | buffer = [] |
|
85 | 85 | for token_class, token_text in tokens: |
|
86 | 86 | parts = token_text.split('\n') |
|
87 | 87 | for part in parts[:-1]: |
|
88 | 88 | buffer.append((token_class, part)) |
|
89 | 89 | yield buffer |
|
90 | 90 | buffer = [] |
|
91 | 91 | |
|
92 | 92 | buffer.append((token_class, parts[-1])) |
|
93 | 93 | |
|
94 | 94 | if buffer: |
|
95 | 95 | yield buffer |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | def filenode_as_annotated_lines_tokens(filenode): |
|
99 | 99 | """ |
|
100 | 100 | Take a file node and return a list of annotations => lines, if no annotation |
|
101 | 101 | is found, it will be None. |
|
102 | 102 | |
|
103 | 103 | eg: |
|
104 | 104 | |
|
105 | 105 | [ |
|
106 | 106 | (annotation1, [ |
|
107 | 107 | (1, line1_tokens_list), |
|
108 | 108 | (2, line2_tokens_list), |
|
109 | 109 | ]), |
|
110 | 110 | (annotation2, [ |
|
111 | 111 | (3, line1_tokens_list), |
|
112 | 112 | ]), |
|
113 | 113 | (None, [ |
|
114 | 114 | (4, line1_tokens_list), |
|
115 | 115 | ]), |
|
116 | 116 | (annotation1, [ |
|
117 | 117 | (5, line1_tokens_list), |
|
118 | 118 | (6, line2_tokens_list), |
|
119 | 119 | ]) |
|
120 | 120 | ] |
|
121 | 121 | """ |
|
122 | 122 | |
|
123 | 123 | commit_cache = {} # cache commit_getter lookups |
|
124 | 124 | |
|
125 | 125 | def _get_annotation(commit_id, commit_getter): |
|
126 | 126 | if commit_id not in commit_cache: |
|
127 | 127 | commit_cache[commit_id] = commit_getter() |
|
128 | 128 | return commit_cache[commit_id] |
|
129 | 129 | |
|
130 | 130 | annotation_lookup = { |
|
131 | 131 | line_no: _get_annotation(commit_id, commit_getter) |
|
132 | 132 | for line_no, commit_id, commit_getter, line_content |
|
133 | 133 | in filenode.annotate |
|
134 | 134 | } |
|
135 | 135 | |
|
136 | 136 | annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens) |
|
137 | 137 | for line_no, tokens |
|
138 | 138 | in enumerate(filenode_as_lines_tokens(filenode), 1)) |
|
139 | 139 | |
|
140 | 140 | grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0]) |
|
141 | 141 | |
|
142 | 142 | for annotation, group in grouped_annotations_lines: |
|
143 | 143 | yield ( |
|
144 | 144 | annotation, [(line_no, tokens) |
|
145 | 145 | for (_, line_no, tokens) in group] |
|
146 | 146 | ) |
|
147 | 147 | |
|
148 | 148 | |
|
def render_tokenstream(tokenstream):
    """
    Render a (class, op, text) token stream as an HTML string.

    Each run of same-class tokens becomes one ``<span>`` (with the class
    attribute when present); inside it, each op ('ins'/'del'/...) wraps its
    text in the matching tag. Token text is HTML-escaped.
    """
    pieces = []
    for css_class, op_chunks in rollup_tokenstream(tokenstream):
        if css_class:
            opening = u'<span class="%s">' % css_class
        else:
            opening = u'<span>'
        pieces.append(opening)

        for op_tag, chunk_text in op_chunks:
            escaped = html_escape(chunk_text)

            # TODO: dan: investigate showing hidden characters like space/nl/tab
            # escaped = escaped.replace(' ', '<sp> </sp>')
            # escaped = escaped.replace('\n', '<nl>\n</nl>')
            # escaped = escaped.replace('\t', '<tab>\t</tab>')

            if op_tag:
                pieces.append(u'<%s>' % op_tag)
                pieces.append(escaped)
                pieces.append(u'</%s>' % op_tag)
            else:
                pieces.append(escaped)

        pieces.append(u'</span>')

    return ''.join(pieces)
|
179 | 179 | |
|
180 | 180 | |
|
def rollup_tokenstream(tokenstream):
    """
    Group a token stream of the format:

        ('class', 'op', 'text')
        or
        ('class', 'text')

    into

        [('class1',
            [('op1', 'text'),
             ('op2', 'text')]),
         ('class2',
            [('op3', 'text')])]

    This is used to get the minimal tags necessary when
    rendering to html eg for a token stream ie.

    <span class="A"><ins>he</ins>llo</span>
    vs
    <span class="A"><ins>he</ins></span><span class="A">llo</span>

    If a 2 tuple is passed in, the output op will be an empty string.

    eg:

    >>> rollup_tokenstream([('classA', '', 'h'),
                            ('classA', 'del', 'ell'),
                            ('classA', '', 'o'),
                            ('classB', '', ' '),
                            ('classA', '', 'the'),
                            ('classA', '', 're'),
                            ])

    [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
     ('classB', [('', ' ')],
     ('classA', [('', 'there')]]
    """
    # normalize 2-tuples to 3-tuples with an empty op marker
    if tokenstream and len(tokenstream[0]) == 2:
        tokenstream = ((cls, '', text) for cls, text in tokenstream)

    rolled = []
    # groupby is intentionally unsorted: only *consecutive* runs of the
    # same class/op are merged, preserving the stream's visual order
    for cls, same_class in groupby(tokenstream, lambda tok: tok[0]):
        op_chunks = []
        for op, same_op in groupby(same_class, lambda tok: tok[1]):
            merged_text = ''.join(tok[2] for tok in same_op)
            op_chunks.append((op, merged_text))
        rolled.append((cls, op_chunks))
    return rolled
|
234 | 234 | |
|
235 | 235 | |
|
def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
    """
    Converts a list of (token_class, token_text) tuples to a list of
    (token_class, token_op, token_text) tuples where token_op is one of
    ('ins', 'del', '')

    :param old_tokens: list of (token_class, token_text) tuples of old line
    :param new_tokens: list of (token_class, token_text) tuples of new line
    :param use_diff_match_patch: boolean, will use google's diff match patch
        library which has options to 'smooth' out the character by character
        differences making nicer ins/del blocks

    :returns: (old_tokens_result, new_tokens_result, similarity) where the
        result lists carry per-token ops and similarity is the difflib
        ratio (0.0-1.0) of the joined old vs new text.
    """

    old_tokens_result = []
    new_tokens_result = []

    # overall text similarity decides whether a fine-grained diff is shown
    similarity = difflib.SequenceMatcher(None,
        ''.join(token_text for token_class, token_text in old_tokens),
        ''.join(token_text for token_class, token_text in new_tokens)
    ).ratio()

    if similarity < 0.6:  # return, the blocks are too different
        # everything is emitted with the '' (unchanged) op: a char-level
        # diff of two mostly-unrelated lines would only add noise
        for token_class, token_text in old_tokens:
            old_tokens_result.append((token_class, '', token_text))
        for token_class, token_text in new_tokens:
            new_tokens_result.append((token_class, '', token_text))
        return old_tokens_result, new_tokens_result, similarity

    # first pass: diff at the granularity of whole token texts
    token_sequence_matcher = difflib.SequenceMatcher(None,
        [x[1] for x in old_tokens],
        [x[1] for x in new_tokens])

    for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
        # check the differences by token block types first to give a more
        # nicer "block" level replacement vs character diffs

        if tag == 'equal':
            for token_class, token_text in old_tokens[o1:o2]:
                old_tokens_result.append((token_class, '', token_text))
            for token_class, token_text in new_tokens[n1:n2]:
                new_tokens_result.append((token_class, '', token_text))
        elif tag == 'delete':
            for token_class, token_text in old_tokens[o1:o2]:
                old_tokens_result.append((token_class, 'del', token_text))
        elif tag == 'insert':
            for token_class, token_text in new_tokens[n1:n2]:
                new_tokens_result.append((token_class, 'ins', token_text))
        elif tag == 'replace':
            # if same type token blocks must be replaced, do a diff on the
            # characters in the token blocks to show individual changes

            # explode the replaced ranges into per-character tokens so the
            # original token class can be re-attached to each character
            old_char_tokens = []
            new_char_tokens = []
            for token_class, token_text in old_tokens[o1:o2]:
                for char in token_text:
                    old_char_tokens.append((token_class, char))

            for token_class, token_text in new_tokens[n1:n2]:
                for char in token_text:
                    new_char_tokens.append((token_class, char))

            old_string = ''.join([token_text for
                token_class, token_text in old_char_tokens])
            new_string = ''.join([token_text for
                token_class, token_text in new_char_tokens])

            char_sequence = difflib.SequenceMatcher(
                None, old_string, new_string)
            copcodes = char_sequence.get_opcodes()
            obuffer, nbuffer = [], []

            if use_diff_match_patch:
                dmp = diff_match_patch()
                dmp.Diff_EditCost = 11  # TODO: dan: extract this to a setting
                reps = dmp.diff_main(old_string, new_string)
                dmp.diff_cleanupEfficiency(reps)

                # dmp ops: 0 = equal, -1 = delete, 1 = insert; a/b track
                # our cursor in the old/new char-token lists
                a, b = 0, 0
                for op, rep in reps:
                    l = len(rep)
                    if op == 0:
                        for i, c in enumerate(rep):
                            obuffer.append((old_char_tokens[a+i][0], '', c))
                            nbuffer.append((new_char_tokens[b+i][0], '', c))
                        a += l
                        b += l
                    elif op == -1:
                        for i, c in enumerate(rep):
                            obuffer.append((old_char_tokens[a+i][0], 'del', c))
                        a += l
                    elif op == 1:
                        for i, c in enumerate(rep):
                            nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
                        b += l
            else:
                # fallback: plain difflib opcodes over the characters
                for ctag, co1, co2, cn1, cn2 in copcodes:
                    if ctag == 'equal':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, '', token_text))
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, '', token_text))
                    elif ctag == 'delete':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, 'del', token_text))
                    elif ctag == 'insert':
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, 'ins', token_text))
                    elif ctag == 'replace':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, 'del', token_text))
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, 'ins', token_text))

            old_tokens_result.extend(obuffer)
            new_tokens_result.extend(nbuffer)

    return old_tokens_result, new_tokens_result, similarity
|
353 | 353 | |
|
354 | 354 | |
|
def diffset_node_getter(commit):
    """
    Return a callable that resolves a filename to a node on *commit*,
    yielding None when the file does not exist on that commit.
    """
    def get_node(fname):
        try:
            node = commit.get_node(fname)
        except NodeDoesNotExistError:
            return None
        return node

    return get_node
|
363 | 363 | |
|
364 | 364 | |
|
class DiffSet(object):
    """
    An object for parsing the diff result from diffs.DiffProcessor and
    adding highlighting, side by side/unified renderings and line diffs
    """

    HL_REAL = 'REAL' # highlights using original file, slow
    HL_FAST = 'FAST' # highlights using just the line, fast but not correct
                     # in the case of multiline code
    HL_NONE = 'NONE' # no highlighting, fastest

    def __init__(self, highlight_mode=HL_REAL, repo_name=None,
                 source_repo_name=None,
                 source_node_getter=lambda filename: None,
                 target_node_getter=lambda filename: None,
                 source_nodes=None, target_nodes=None,
                 max_file_size_limit=150 * 1024, # files over this size will
                                                 # use fast highlighting
                 comments=None,
                 ):
        """
        :param highlight_mode: one of HL_REAL / HL_FAST / HL_NONE
        :param source_node_getter: callable mapping a filename to a node on
            the source commit (or None); likewise for target_node_getter
        :param max_file_size_limit: files above this size fall back to
            fast (per-line) highlighting
        :param comments: mapping of file path -> {line_no: comments} —
            assumed shape, based on usage in render_patch; confirm at caller
        """

        self.highlight_mode = highlight_mode
        self.highlighted_filenodes = {}  # filenode -> tokenized-lines cache
        self.source_node_getter = source_node_getter
        self.target_node_getter = target_node_getter
        self.source_nodes = source_nodes or {}
        self.target_nodes = target_nodes or {}
        self.repo_name = repo_name
        self.source_repo_name = source_repo_name or repo_name
        self.comments = comments or {}
        # shallow copy: the per-file comment containers are still shared
        # with self.comments
        self.comments_store = self.comments.copy()
        self.max_file_size_limit = max_file_size_limit

    def render_patchset(self, patchset, source_ref=None, target_ref=None):
        """
        Render every patch in *patchset* and collect them, plus aggregate
        add/delete/changed-file counters, into one AttributeDict diffset.
        """
        diffset = AttributeDict(dict(
            lines_added=0,
            lines_deleted=0,
            changed_files=0,
            files=[],
            file_stats={},
            limited_diff=isinstance(patchset, LimitedDiffContainer),
            repo_name=self.repo_name,
            source_repo_name=self.source_repo_name,
            source_ref=source_ref,
            target_ref=target_ref,
        ))
        for patch in patchset:
            diffset.file_stats[patch['filename']] = patch['stats']
            filediff = self.render_patch(patch)
            # each filediff carries a slim back-reference with just the
            # refs/names needed by the templates
            filediff.diffset = StrictAttributeDict(dict(
                source_ref=diffset.source_ref,
                target_ref=diffset.target_ref,
                repo_name=diffset.repo_name,
                source_repo_name=diffset.source_repo_name,
            ))
            diffset.files.append(filediff)
            diffset.changed_files += 1
            # binary files carry no meaningful line counts
            if not patch['stats']['binary']:
                diffset.lines_added += patch['stats']['added']
                diffset.lines_deleted += patch['stats']['deleted']

        return diffset

    # NOTE: class-level mutable attribute — this cache is shared by ALL
    # DiffSet instances (keyed by filename), not per-instance.
    _lexer_cache = {}

    def _get_lexer_for_filename(self, filename, filenode=None):
        # cached because we might need to call it twice for source/target
        if filename not in self._lexer_cache:
            if filenode:
                lexer = filenode.lexer
                extension = filenode.extension
            else:
                lexer = FileNode.get_lexer(filename=filename)
                extension = filename.split('.')[-1]

            # a custom lexer registered for the extension wins
            lexer = get_custom_lexer(extension) or lexer
            self._lexer_cache[filename] = lexer
        return self._lexer_cache[filename]

    def render_patch(self, patch):
        """
        Build an AttributeDict filediff for one parsed patch dict:
        resolves source/target filenodes, picks lexers per the highlight
        mode, renders each hunk, and attaches leftover comments.
        """
        log.debug('rendering diff for %r', patch['filename'])

        source_filename = patch['original_filename']
        target_filename = patch['filename']

        source_lexer = plain_text_lexer
        target_lexer = plain_text_lexer

        if not patch['stats']['binary']:
            if self.highlight_mode == self.HL_REAL:
                # lazily resolve file nodes only for the operations that
                # can actually have them (D/M -> source, A/M -> target)
                if (source_filename and patch['operation'] in ('D', 'M')
                        and source_filename not in self.source_nodes):
                    self.source_nodes[source_filename] = (
                        self.source_node_getter(source_filename))

                if (target_filename and patch['operation'] in ('A', 'M')
                        and target_filename not in self.target_nodes):
                    self.target_nodes[target_filename] = (
                        self.target_node_getter(target_filename))

            elif self.highlight_mode == self.HL_FAST:
                source_lexer = self._get_lexer_for_filename(source_filename)
                target_lexer = self._get_lexer_for_filename(target_filename)

        # falls back to the bare filename string when no node was resolved
        source_file = self.source_nodes.get(source_filename, source_filename)
        target_file = self.target_nodes.get(target_filename, target_filename)

        source_filenode, target_filenode = None, None

        # TODO: dan: FileNode.lexer works on the content of the file - which
        # can be slow - issue #4289 explains a lexer clean up - which once
        # done can allow caching a lexer for a filenode to avoid the file lookup
        if isinstance(source_file, FileNode):
            source_filenode = source_file
            #source_lexer = source_file.lexer
            source_lexer = self._get_lexer_for_filename(source_filename)
            source_file.lexer = source_lexer

        if isinstance(target_file, FileNode):
            target_filenode = target_file
            #target_lexer = target_file.lexer
            target_lexer = self._get_lexer_for_filename(target_filename)
            target_file.lexer = target_lexer

        source_file_path, target_file_path = None, None

        # '/dev/null' marks an added/deleted side of the diff
        if source_filename != '/dev/null':
            source_file_path = source_filename
        if target_filename != '/dev/null':
            target_file_path = target_filename

        source_file_type = source_lexer.name
        target_file_type = target_lexer.name

        filediff = AttributeDict({
            'source_file_path': source_file_path,
            'target_file_path': target_file_path,
            'source_filenode': source_filenode,
            'target_filenode': target_filenode,
            # NOTE(review): source/target types look swapped here (source
            # gets the target lexer name and vice versa) — confirm whether
            # this is intentional before changing.
            'source_file_type': target_file_type,
            'target_file_type': source_file_type,
            'patch': {'filename': patch['filename'], 'stats': patch['stats']},
            'operation': patch['operation'],
            'source_mode': patch['stats']['old_mode'],
            'target_mode': patch['stats']['new_mode'],
            'limited_diff': isinstance(patch, LimitedDiffContainer),
            'hunks': [],
            'diffset': self,
        })

        # chunk [0] is skipped — presumably a header chunk; confirm against
        # diffs.DiffProcessor output shape
        for hunk in patch['chunks'][1:]:
            hunkbit = self.parse_hunk(hunk, source_file, target_file)
            hunkbit.source_file_path = source_file_path
            hunkbit.target_file_path = target_file_path
            filediff.hunks.append(hunkbit)

        left_comments = {}
        if source_file_path in self.comments_store:
            for lineno, comments in self.comments_store[source_file_path].items():
                left_comments[lineno] = comments

        if target_file_path in self.comments_store:
            for lineno, comments in self.comments_store[target_file_path].items():
                left_comments[lineno] = comments

        # left comments are one that we couldn't place in diff lines.
        # could be outdated, or the diff changed and this line is no
        # longer available
        filediff.left_comments = left_comments

        return filediff

    def parse_hunk(self, hunk, source_file, target_file):
        """
        Turn one hunk dict into an AttributeDict with rendered lines, both
        as side-by-side pairs (.sideside) and in unified order (.unified).
        """
        result = AttributeDict(dict(
            source_start=hunk['source_start'],
            source_length=hunk['source_length'],
            target_start=hunk['target_start'],
            target_length=hunk['target_length'],
            section_header=hunk['section_header'],
            lines=[],
        ))
        before, after = [], []

        for line in hunk['lines']:

            if line['action'] == 'unmod':
                # an unmodified line closes the current add/del run: flush
                # the buffered before/after block, then let the context
                # line join both sides
                result.lines.extend(
                    self.parse_lines(before, after, source_file, target_file))
                after.append(line)
                before.append(line)
            elif line['action'] == 'add':
                after.append(line)
            elif line['action'] == 'del':
                before.append(line)
            elif line['action'] == 'old-no-nl':
                before.append(line)
            elif line['action'] == 'new-no-nl':
                after.append(line)

        # flush whatever remains buffered at the end of the hunk
        result.lines.extend(
            self.parse_lines(before, after, source_file, target_file))
        result.unified = list(self.as_unified(result.lines))
        result.sideside = result.lines

        return result

    def parse_lines(self, before_lines, after_lines, source_file, target_file):
        """
        Pair up buffered before/after hunk lines into side-by-side
        original/modified AttributeDicts with tokenized, rendered content.
        Consumes (pops from) both input lists.
        """
        # TODO: dan: investigate doing the diff comparison and fast highlighting
        # on the entire before and after buffered block lines rather than by
        # line, this means we can get better 'fast' highlighting if the context
        # allows it - eg.
        # line 4: """
        # line 5: this gets highlighted as a string
        # line 6: """

        lines = []

        # 'no newline at end of file' markers are popped off and rendered
        # as a trailing annotation appended to the last real line
        before_newline = AttributeDict()
        after_newline = AttributeDict()
        if before_lines and before_lines[-1]['action'] == 'old-no-nl':
            before_newline_line = before_lines.pop(-1)
            before_newline.content = '\n {}'.format(
                render_tokenstream(
                    [(x[0], '', x[1])
                     for x in [('nonl', before_newline_line['line'])]]))

        if after_lines and after_lines[-1]['action'] == 'new-no-nl':
            after_newline_line = after_lines.pop(-1)
            after_newline.content = '\n {}'.format(
                render_tokenstream(
                    [(x[0], '', x[1])
                     for x in [('nonl', after_newline_line['line'])]]))

        while before_lines or after_lines:
            before, after = None, None
            before_tokens, after_tokens = None, None

            if before_lines:
                before = before_lines.pop(0)
            if after_lines:
                after = after_lines.pop(0)

            original = AttributeDict()
            modified = AttributeDict()

            if before:
                if before['action'] == 'old-no-nl':
                    before_tokens = [('nonl', before['line'])]
                else:
                    before_tokens = self.get_line_tokens(
                        line_text=before['line'],
                        line_number=before['old_lineno'],
                        file=source_file)
                original.lineno = before['old_lineno']
                original.content = before['line']
                original.action = self.action_to_op(before['action'])

                original.get_comment_args = (
                    source_file, 'o', before['old_lineno'])

            if after:
                if after['action'] == 'new-no-nl':
                    after_tokens = [('nonl', after['line'])]
                else:
                    after_tokens = self.get_line_tokens(
                        line_text=after['line'], line_number=after['new_lineno'],
                        file=target_file)
                modified.lineno = after['new_lineno']
                modified.content = after['line']
                modified.action = self.action_to_op(after['action'])

                modified.get_comment_args = (
                    target_file, 'n', after['new_lineno'])

            # diff the lines
            if before_tokens and after_tokens:
                o_tokens, m_tokens, similarity = tokens_diff(
                    before_tokens, after_tokens)
                original.content = render_tokenstream(o_tokens)
                modified.content = render_tokenstream(m_tokens)
            elif before_tokens:
                original.content = render_tokenstream(
                    [(x[0], '', x[1]) for x in before_tokens])
            elif after_tokens:
                modified.content = render_tokenstream(
                    [(x[0], '', x[1]) for x in after_tokens])

            # append the no-newline marker to the very last line of each
            # side; truthiness of the AttributeDict (non-empty) gates this
            if not before_lines and before_newline:
                original.content += before_newline.content
                before_newline = None
            if not after_lines and after_newline:
                modified.content += after_newline.content
                after_newline = None

            lines.append(AttributeDict({
                'original': original,
                'modified': modified,
            }))

        return lines

    def get_line_tokens(self, line_text, line_number, file=None):
        """
        Tokenize one line of text, honoring the configured highlight mode.
        *file* may be a filename string or a FileNode (Python 2
        ``basestring`` check below).
        """
        filenode = None
        filename = None

        if isinstance(file, basestring):
            filename = file
        elif isinstance(file, FileNode):
            filenode = file
            filename = file.unicode_path

        if self.highlight_mode == self.HL_REAL and filenode:
            lexer = self._get_lexer_for_filename(filename)
            # oversized files skip whole-file highlighting (too slow)
            file_size_allowed = file.size < self.max_file_size_limit
            if line_number and file_size_allowed:
                return self.get_tokenized_filenode_line(
                    file, line_number, lexer)

        if self.highlight_mode in (self.HL_REAL, self.HL_FAST) and filename:
            # fast path: lex just this line in isolation
            lexer = self._get_lexer_for_filename(filename)
            return list(tokenize_string(line_text, lexer))

        return list(tokenize_string(line_text, plain_text_lexer))

    def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
        """Return the tokens of one (1-based) line, caching the whole
        tokenized file per filenode."""

        if filenode not in self.highlighted_filenodes:
            tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
            self.highlighted_filenodes[filenode] = tokenized_lines
        return self.highlighted_filenodes[filenode][line_number - 1]

    def action_to_op(self, action):
        """Map a hunk-line action name to its one-character diff op."""
        return {
            'add': '+',
            'del': '-',
            'unmod': ' ',
            'old-no-nl': ' ',
            'new-no-nl': ' ',
        }.get(action, action)

    def as_unified(self, lines):
        """
        Return a generator that yields the lines of a diff in unified order
        """
        def generator():
            buf = []
            for line in lines:

                # NOTE(review): by precedence this reads as
                # (buf and not line.original) or (line.original.action == ' ')
                # — confirm that is the intended flush condition.
                if buf and not line.original or line.original.action == ' ':
                    for b in buf:
                        yield b
                    buf = []

                if line.original:
                    if line.original.action == ' ':
                        yield (line.original.lineno, line.modified.lineno,
                               line.original.action, line.original.content,
                               line.original.get_comment_args)
                        continue

                    if line.original.action == '-':
                        yield (line.original.lineno, None,
                               line.original.action, line.original.content,
                               line.original.get_comment_args)

                    # additions are buffered so they appear after the
                    # deletions of the same replace block
                    if line.modified.action == '+':
                        buf.append((
                            None, line.modified.lineno,
                            line.modified.action, line.modified.content,
                            line.modified.get_comment_args))
                    continue

                if line.modified:
                    yield (None, line.modified.lineno,
                           line.modified.action, line.modified.content,
                           line.modified.get_comment_args)

            # flush any additions still buffered at the end
            for b in buf:
                yield b

        return generator()
@@ -1,621 +1,621 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Database creation, and setup module for RhodeCode Enterprise. Used for creation |
|
23 | 23 | of database as well as for migration operations |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import sys |
|
28 | 28 | import time |
|
29 | 29 | import uuid |
|
30 | 30 | import logging |
|
31 | 31 | import getpass |
|
32 | 32 | from os.path import dirname as dn, join as jn |
|
33 | 33 | |
|
34 | 34 | from sqlalchemy.engine import create_engine |
|
35 | 35 | |
|
36 | 36 | from rhodecode import __dbversion__ |
|
37 | 37 | from rhodecode.model import init_model |
|
38 | 38 | from rhodecode.model.user import UserModel |
|
39 | 39 | from rhodecode.model.db import ( |
|
40 | 40 | User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm, |
|
41 | 41 | DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository) |
|
42 | 42 | from rhodecode.model.meta import Session, Base |
|
43 | 43 | from rhodecode.model.permission import PermissionModel |
|
44 | 44 | from rhodecode.model.repo import RepoModel |
|
45 | 45 | from rhodecode.model.repo_group import RepoGroupModel |
|
46 | 46 | from rhodecode.model.settings import SettingsModel |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
def notify(msg):
    """
    Notification for migrations messages

    Prints *msg* upper-cased inside an asterisk frame sized to the message.
    """
    width = len(msg) + (4 * 2)
    banner = '\n%s\n*** %s ***\n%s' % ('*' * width, msg, '*' * width)
    print(banner.upper())
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | class DbManage(object): |
|
61 | 61 | |
|
    def __init__(self, log_sql, dbconf, root, tests=False,
                 SESSION=None, cli_args=None):
        """
        :param log_sql: echo SQL statements to the log (passed to engine)
        :param dbconf: database URI; its last '/'-segment is used as dbname
        :param root: application root path
        :param tests: True when running under the test suite (skips prompts)
        :param SESSION: pre-made session to reuse instead of creating one
        :param cli_args: dict of CLI options (e.g. 'force_ask')
        """
        # last URI path segment doubles as the database name
        self.dbname = dbconf.split('/')[-1]
        self.tests = tests
        self.root = root
        self.dburi = dbconf
        self.log_sql = log_sql
        self.db_exists = False
        self.cli_args = cli_args or {}
        # binds self.sa (side effect: may create engine + session)
        self.init_db(SESSION=SESSION)
        self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
|
73 | 73 | |
|
74 | 74 | def get_ask_ok_func(self, param): |
|
75 | 75 | if param not in [None]: |
|
76 | 76 | # return a function lambda that has a default set to param |
|
77 | 77 | return lambda *args, **kwargs: param |
|
78 | 78 | else: |
|
79 | 79 | from rhodecode.lib.utils import ask_ok |
|
80 | 80 | return ask_ok |
|
81 | 81 | |
|
    def init_db(self, SESSION=None):
        """
        Bind ``self.sa`` to a database session: reuse *SESSION* when given,
        otherwise create a fresh engine from ``self.dburi`` and initialize
        the model layer.
        """
        if SESSION:
            self.sa = SESSION
        else:
            # init new sessions
            engine = create_engine(self.dburi, echo=self.log_sql)
            init_model(engine)
            self.sa = Session()
|
90 | 90 | |
|
    def create_tables(self, override=False):
        """
        Create the auth database tables.

        DESTRUCTIVE: drops all existing tables first. In test mode this is
        done unconditionally; otherwise the user is asked to confirm, and a
        negative answer exits the process.

        :param override: when True, skip the "table already exists" check
            on creation (``checkfirst`` is disabled)
        """

        log.info("Existing database with the same name is going to be destroyed.")
        log.info("Setup command will run DROP ALL command on that database.")
        if self.tests:
            destroy = True
        else:
            destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
        if not destroy:
            log.info('Nothing done.')
            sys.exit(0)
        if destroy:
            Base.metadata.drop_all()

        checkfirst = not override
        Base.metadata.create_all(checkfirst=checkfirst)
        log.info('Created tables for %s', self.dbname)
|
111 | 111 | |
|
    def set_db_version(self):
        """
        Stamp the database with the current code-level schema version
        (``__dbversion__``) by adding a DbMigrateVersion row. Note: the row
        is only added to the session; committing is left to the caller.
        """
        ver = DbMigrateVersion()
        ver.version = __dbversion__
        ver.repository_id = 'rhodecode_db_migrations'
        ver.repository_path = 'versions'
        self.sa.add(ver)
        log.info('db version set to: %s', __dbversion__)
|
119 | 119 | |
|
    def run_pre_migration_tasks(self):
        """
        Run various tasks before actually doing migrations
        """
        # delete cache keys on each upgrade, so stale cached data cannot
        # survive a schema change
        total = CacheKey.query().count()
        log.info("Deleting (%s) cache keys now...", total)
        CacheKey.delete_all_cache()
|
128 | 128 | |
|
129 | 129 | def upgrade(self, version=None): |
|
130 | 130 | """ |
|
131 | 131 | Upgrades given database schema to given revision following |
|
132 | 132 | all needed steps, to perform the upgrade |
|
133 | 133 | |
|
134 | 134 | """ |
|
135 | 135 | |
|
136 | 136 | from rhodecode.lib.dbmigrate.migrate.versioning import api |
|
137 | 137 | from rhodecode.lib.dbmigrate.migrate.exceptions import \ |
|
138 | 138 | DatabaseNotControlledError |
|
139 | 139 | |
|
140 | 140 | if 'sqlite' in self.dburi: |
|
141 | 141 | print( |
|
142 | 142 | '********************** WARNING **********************\n' |
|
143 | 143 | 'Make sure your version of sqlite is at least 3.7.X. \n' |
|
144 | 144 | 'Earlier versions are known to fail on some migrations\n' |
|
145 | 145 | '*****************************************************\n') |
|
146 | 146 | |
|
147 | 147 | upgrade = self.ask_ok( |
|
148 | 148 | 'You are about to perform a database upgrade. Make ' |
|
149 | 149 | 'sure you have backed up your database. ' |
|
150 | 150 | 'Continue ? [y/n]') |
|
151 | 151 | if not upgrade: |
|
152 | 152 | log.info('No upgrade performed') |
|
153 | 153 | sys.exit(0) |
|
154 | 154 | |
|
155 | 155 | repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))), |
|
156 | 156 | 'rhodecode/lib/dbmigrate') |
|
157 | 157 | db_uri = self.dburi |
|
158 | 158 | |
|
159 | 159 | try: |
|
160 | 160 | curr_version = version or api.db_version(db_uri, repository_path) |
|
161 | 161 | msg = ('Found current database db_uri under version ' |
|
162 | 162 | 'control with version {}'.format(curr_version)) |
|
163 | 163 | |
|
164 | 164 | except (RuntimeError, DatabaseNotControlledError): |
|
165 | 165 | curr_version = 1 |
|
166 | 166 | msg = ('Current database is not under version control. Setting ' |
|
167 | 167 | 'as version %s' % curr_version) |
|
168 | 168 | api.version_control(db_uri, repository_path, curr_version) |
|
169 | 169 | |
|
170 | 170 | notify(msg) |
|
171 | 171 | |
|
172 | 172 | self.run_pre_migration_tasks() |
|
173 | 173 | |
|
174 | 174 | if curr_version == __dbversion__: |
|
175 | 175 | log.info('This database is already at the newest version') |
|
176 | 176 | sys.exit(0) |
|
177 | 177 | |
|
178 | 178 | upgrade_steps = range(curr_version + 1, __dbversion__ + 1) |
|
179 | 179 | notify('attempting to upgrade database from ' |
|
180 | 180 | 'version %s to version %s' % (curr_version, __dbversion__)) |
|
181 | 181 | |
|
182 | 182 | # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE |
|
183 | 183 | _step = None |
|
184 | 184 | for step in upgrade_steps: |
|
185 | 185 | notify('performing upgrade step %s' % step) |
|
186 | 186 | time.sleep(0.5) |
|
187 | 187 | |
|
188 | 188 | api.upgrade(db_uri, repository_path, step) |
|
189 | 189 | self.sa.rollback() |
|
190 | 190 | notify('schema upgrade for step %s completed' % (step,)) |
|
191 | 191 | |
|
192 | 192 | _step = step |
|
193 | 193 | |
|
194 | 194 | notify('upgrade to version %s successful' % _step) |
|
195 | 195 | |
|
196 | 196 | def fix_repo_paths(self): |
|
197 | 197 | """ |
|
198 | 198 | Fixes an old RhodeCode version path into new one without a '*' |
|
199 | 199 | """ |
|
200 | 200 | |
|
201 | 201 | paths = self.sa.query(RhodeCodeUi)\ |
|
202 | 202 | .filter(RhodeCodeUi.ui_key == '/')\ |
|
203 | 203 | .scalar() |
|
204 | 204 | |
|
205 | 205 | paths.ui_value = paths.ui_value.replace('*', '') |
|
206 | 206 | |
|
207 | 207 | try: |
|
208 | 208 | self.sa.add(paths) |
|
209 | 209 | self.sa.commit() |
|
210 | 210 | except Exception: |
|
211 | 211 | self.sa.rollback() |
|
212 | 212 | raise |
|
213 | 213 | |
|
214 | 214 | def fix_default_user(self): |
|
215 | 215 | """ |
|
216 | 216 | Fixes an old default user with some 'nicer' default values, |
|
217 | 217 | used mostly for anonymous access |
|
218 | 218 | """ |
|
219 | 219 | def_user = self.sa.query(User)\ |
|
220 | 220 | .filter(User.username == User.DEFAULT_USER)\ |
|
221 | 221 | .one() |
|
222 | 222 | |
|
223 | 223 | def_user.name = 'Anonymous' |
|
224 | 224 | def_user.lastname = 'User' |
|
225 | 225 | def_user.email = User.DEFAULT_USER_EMAIL |
|
226 | 226 | |
|
227 | 227 | try: |
|
228 | 228 | self.sa.add(def_user) |
|
229 | 229 | self.sa.commit() |
|
230 | 230 | except Exception: |
|
231 | 231 | self.sa.rollback() |
|
232 | 232 | raise |
|
233 | 233 | |
|
234 | 234 | def fix_settings(self): |
|
235 | 235 | """ |
|
236 | 236 | Fixes rhodecode settings and adds ga_code key for google analytics |
|
237 | 237 | """ |
|
238 | 238 | |
|
239 | 239 | hgsettings3 = RhodeCodeSetting('ga_code', '') |
|
240 | 240 | |
|
241 | 241 | try: |
|
242 | 242 | self.sa.add(hgsettings3) |
|
243 | 243 | self.sa.commit() |
|
244 | 244 | except Exception: |
|
245 | 245 | self.sa.rollback() |
|
246 | 246 | raise |
|
247 | 247 | |
|
248 | 248 | def create_admin_and_prompt(self): |
|
249 | 249 | |
|
250 | 250 | # defaults |
|
251 | 251 | defaults = self.cli_args |
|
252 | 252 | username = defaults.get('username') |
|
253 | 253 | password = defaults.get('password') |
|
254 | 254 | email = defaults.get('email') |
|
255 | 255 | |
|
256 | 256 | if username is None: |
|
257 | 257 | username = raw_input('Specify admin username:') |
|
258 | 258 | if password is None: |
|
259 | 259 | password = self._get_admin_password() |
|
260 | 260 | if not password: |
|
261 | 261 | # second try |
|
262 | 262 | password = self._get_admin_password() |
|
263 | 263 | if not password: |
|
264 | 264 | sys.exit() |
|
265 | 265 | if email is None: |
|
266 | 266 | email = raw_input('Specify admin email:') |
|
267 | 267 | api_key = self.cli_args.get('api_key') |
|
268 | 268 | self.create_user(username, password, email, True, |
|
269 | 269 | strict_creation_check=False, |
|
270 | 270 | api_key=api_key) |
|
271 | 271 | |
|
272 | 272 | def _get_admin_password(self): |
|
273 | 273 | password = getpass.getpass('Specify admin password ' |
|
274 | 274 | '(min 6 chars):') |
|
275 | 275 | confirm = getpass.getpass('Confirm password:') |
|
276 | 276 | |
|
277 | 277 | if password != confirm: |
|
278 | 278 | log.error('passwords mismatch') |
|
279 | 279 | return False |
|
280 | 280 | if len(password) < 6: |
|
281 | 281 | log.error('password is too short - use at least 6 characters') |
|
282 | 282 | return False |
|
283 | 283 | |
|
284 | 284 | return password |
|
285 | 285 | |
|
286 | 286 | def create_test_admin_and_users(self): |
|
287 | 287 | log.info('creating admin and regular test users') |
|
288 | 288 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \ |
|
289 | 289 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ |
|
290 | 290 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ |
|
291 | 291 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ |
|
292 | 292 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL |
|
293 | 293 | |
|
294 | 294 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, |
|
295 | 295 | TEST_USER_ADMIN_EMAIL, True, api_key=True) |
|
296 | 296 | |
|
297 | 297 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, |
|
298 | 298 | TEST_USER_REGULAR_EMAIL, False, api_key=True) |
|
299 | 299 | |
|
300 | 300 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, |
|
301 | 301 | TEST_USER_REGULAR2_EMAIL, False, api_key=True) |
|
302 | 302 | |
|
303 | 303 | def create_ui_settings(self, repo_store_path): |
|
304 | 304 | """ |
|
305 | 305 | Creates ui settings, fills out hooks |
|
306 | 306 | and disables dotencode |
|
307 | 307 | """ |
|
308 | 308 | settings_model = SettingsModel(sa=self.sa) |
|
309 | 309 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
310 | 310 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
311 | 311 | |
|
312 | 312 | # Build HOOKS |
|
313 | 313 | hooks = [ |
|
314 | 314 | (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'), |
|
315 | 315 | |
|
316 | 316 | # HG |
|
317 | 317 | (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'), |
|
318 | 318 | (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'), |
|
319 | 319 | (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'), |
|
320 | 320 | (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'), |
|
321 | 321 | (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'), |
|
322 | 322 | (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'), |
|
323 | 323 | |
|
324 | 324 | ] |
|
325 | 325 | |
|
326 | 326 | for key, value in hooks: |
|
327 | 327 | hook_obj = settings_model.get_ui_by_key(key) |
|
328 | 328 | hooks2 = hook_obj if hook_obj else RhodeCodeUi() |
|
329 | 329 | hooks2.ui_section = 'hooks' |
|
330 | 330 | hooks2.ui_key = key |
|
331 | 331 | hooks2.ui_value = value |
|
332 | 332 | self.sa.add(hooks2) |
|
333 | 333 | |
|
334 | 334 | # enable largefiles |
|
335 | 335 | largefiles = RhodeCodeUi() |
|
336 | 336 | largefiles.ui_section = 'extensions' |
|
337 | 337 | largefiles.ui_key = 'largefiles' |
|
338 | 338 | largefiles.ui_value = '' |
|
339 | 339 | self.sa.add(largefiles) |
|
340 | 340 | |
|
341 | 341 | # set default largefiles cache dir, defaults to |
|
342 | 342 | # /repo_store_location/.cache/largefiles |
|
343 | 343 | largefiles = RhodeCodeUi() |
|
344 | 344 | largefiles.ui_section = 'largefiles' |
|
345 | 345 | largefiles.ui_key = 'usercache' |
|
346 | 346 | largefiles.ui_value = largefiles_store(repo_store_path) |
|
347 | 347 | |
|
348 | 348 | self.sa.add(largefiles) |
|
349 | 349 | |
|
350 | 350 | # set default lfs cache dir, defaults to |
|
351 | 351 | # /repo_store_location/.cache/lfs_store |
|
352 | 352 | lfsstore = RhodeCodeUi() |
|
353 | 353 | lfsstore.ui_section = 'vcs_git_lfs' |
|
354 | 354 | lfsstore.ui_key = 'store_location' |
|
355 | 355 | lfsstore.ui_value = lfs_store(repo_store_path) |
|
356 | 356 | |
|
357 | 357 | self.sa.add(lfsstore) |
|
358 | 358 | |
|
359 | 359 | # enable hgsubversion disabled by default |
|
360 | 360 | hgsubversion = RhodeCodeUi() |
|
361 | 361 | hgsubversion.ui_section = 'extensions' |
|
362 | 362 | hgsubversion.ui_key = 'hgsubversion' |
|
363 | 363 | hgsubversion.ui_value = '' |
|
364 | 364 | hgsubversion.ui_active = False |
|
365 | 365 | self.sa.add(hgsubversion) |
|
366 | 366 | |
|
367 | 367 | # enable hgevolve disabled by default |
|
368 | 368 | hgevolve = RhodeCodeUi() |
|
369 | 369 | hgevolve.ui_section = 'extensions' |
|
370 | 370 | hgevolve.ui_key = 'evolve' |
|
371 | 371 | hgevolve.ui_value = '' |
|
372 | 372 | hgevolve.ui_active = False |
|
373 | 373 | self.sa.add(hgevolve) |
|
374 | 374 | |
|
375 | 375 | # enable hggit disabled by default |
|
376 | 376 | hggit = RhodeCodeUi() |
|
377 | 377 | hggit.ui_section = 'extensions' |
|
378 | 378 | hggit.ui_key = 'hggit' |
|
379 | 379 | hggit.ui_value = '' |
|
380 | 380 | hggit.ui_active = False |
|
381 | 381 | self.sa.add(hggit) |
|
382 | 382 | |
|
383 | 383 | # set svn branch defaults |
|
384 | 384 | branches = ["/branches/*", "/trunk"] |
|
385 | 385 | tags = ["/tags/*"] |
|
386 | 386 | |
|
387 | 387 | for branch in branches: |
|
388 | 388 | settings_model.create_ui_section_value( |
|
389 | 389 | RhodeCodeUi.SVN_BRANCH_ID, branch) |
|
390 | 390 | |
|
391 | 391 | for tag in tags: |
|
392 | 392 | settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag) |
|
393 | 393 | |
|
394 | 394 | def create_auth_plugin_options(self, skip_existing=False): |
|
395 | 395 | """ |
|
396 | 396 | Create default auth plugin settings, and make it active |
|
397 | 397 | |
|
398 | 398 | :param skip_existing: |
|
399 | 399 | """ |
|
400 | 400 | |
|
401 | 401 | for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'), |
|
402 | 402 | ('auth_rhodecode_enabled', 'True', 'bool')]: |
|
403 | 403 | if (skip_existing and |
|
404 | 404 | SettingsModel().get_setting_by_name(k) is not None): |
|
405 |
log.debug('Skipping option %s' |
|
|
405 | log.debug('Skipping option %s', k) | |
|
406 | 406 | continue |
|
407 | 407 | setting = RhodeCodeSetting(k, v, t) |
|
408 | 408 | self.sa.add(setting) |
|
409 | 409 | |
|
410 | 410 | def create_default_options(self, skip_existing=False): |
|
411 | 411 | """Creates default settings""" |
|
412 | 412 | |
|
413 | 413 | for k, v, t in [ |
|
414 | 414 | ('default_repo_enable_locking', False, 'bool'), |
|
415 | 415 | ('default_repo_enable_downloads', False, 'bool'), |
|
416 | 416 | ('default_repo_enable_statistics', False, 'bool'), |
|
417 | 417 | ('default_repo_private', False, 'bool'), |
|
418 | 418 | ('default_repo_type', 'hg', 'unicode')]: |
|
419 | 419 | |
|
420 | 420 | if (skip_existing and |
|
421 | 421 | SettingsModel().get_setting_by_name(k) is not None): |
|
422 |
log.debug('Skipping option %s' |
|
|
422 | log.debug('Skipping option %s', k) | |
|
423 | 423 | continue |
|
424 | 424 | setting = RhodeCodeSetting(k, v, t) |
|
425 | 425 | self.sa.add(setting) |
|
426 | 426 | |
|
427 | 427 | def fixup_groups(self): |
|
428 | 428 | def_usr = User.get_default_user() |
|
429 | 429 | for g in RepoGroup.query().all(): |
|
430 | 430 | g.group_name = g.get_new_name(g.name) |
|
431 | 431 | self.sa.add(g) |
|
432 | 432 | # get default perm |
|
433 | 433 | default = UserRepoGroupToPerm.query()\ |
|
434 | 434 | .filter(UserRepoGroupToPerm.group == g)\ |
|
435 | 435 | .filter(UserRepoGroupToPerm.user == def_usr)\ |
|
436 | 436 | .scalar() |
|
437 | 437 | |
|
438 | 438 | if default is None: |
|
439 |
log.debug('missing default permission for group %s adding' |
|
|
439 | log.debug('missing default permission for group %s adding', g) | |
|
440 | 440 | perm_obj = RepoGroupModel()._create_default_perms(g) |
|
441 | 441 | self.sa.add(perm_obj) |
|
442 | 442 | |
|
443 | 443 | def reset_permissions(self, username): |
|
444 | 444 | """ |
|
445 | 445 | Resets permissions to default state, useful when old systems had |
|
446 | 446 | bad permissions, we must clean them up |
|
447 | 447 | |
|
448 | 448 | :param username: |
|
449 | 449 | """ |
|
450 | 450 | default_user = User.get_by_username(username) |
|
451 | 451 | if not default_user: |
|
452 | 452 | return |
|
453 | 453 | |
|
454 | 454 | u2p = UserToPerm.query()\ |
|
455 | 455 | .filter(UserToPerm.user == default_user).all() |
|
456 | 456 | fixed = False |
|
457 | 457 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): |
|
458 | 458 | for p in u2p: |
|
459 | 459 | Session().delete(p) |
|
460 | 460 | fixed = True |
|
461 | 461 | self.populate_default_permissions() |
|
462 | 462 | return fixed |
|
463 | 463 | |
|
464 | 464 | def update_repo_info(self): |
|
465 | 465 | RepoModel.update_repoinfo() |
|
466 | 466 | |
|
467 | 467 | def config_prompt(self, test_repo_path='', retries=3): |
|
468 | 468 | defaults = self.cli_args |
|
469 | 469 | _path = defaults.get('repos_location') |
|
470 | 470 | if retries == 3: |
|
471 | 471 | log.info('Setting up repositories config') |
|
472 | 472 | |
|
473 | 473 | if _path is not None: |
|
474 | 474 | path = _path |
|
475 | 475 | elif not self.tests and not test_repo_path: |
|
476 | 476 | path = raw_input( |
|
477 | 477 | 'Enter a valid absolute path to store repositories. ' |
|
478 | 478 | 'All repositories in that path will be added automatically:' |
|
479 | 479 | ) |
|
480 | 480 | else: |
|
481 | 481 | path = test_repo_path |
|
482 | 482 | path_ok = True |
|
483 | 483 | |
|
484 | 484 | # check proper dir |
|
485 | 485 | if not os.path.isdir(path): |
|
486 | 486 | path_ok = False |
|
487 |
log.error('Given path %s is not a valid directory' |
|
|
487 | log.error('Given path %s is not a valid directory', path) | |
|
488 | 488 | |
|
489 | 489 | elif not os.path.isabs(path): |
|
490 | 490 | path_ok = False |
|
491 |
log.error('Given path %s is not an absolute path' |
|
|
491 | log.error('Given path %s is not an absolute path', path) | |
|
492 | 492 | |
|
493 | 493 | # check if path is at least readable. |
|
494 | 494 | if not os.access(path, os.R_OK): |
|
495 | 495 | path_ok = False |
|
496 |
log.error('Given path %s is not readable' |
|
|
496 | log.error('Given path %s is not readable', path) | |
|
497 | 497 | |
|
498 | 498 | # check write access, warn user about non writeable paths |
|
499 | 499 | elif not os.access(path, os.W_OK) and path_ok: |
|
500 |
log.warning('No write permission to given path %s' |
|
|
500 | log.warning('No write permission to given path %s', path) | |
|
501 | 501 | |
|
502 | 502 | q = ('Given path %s is not writeable, do you want to ' |
|
503 | 503 | 'continue with read only mode ? [y/n]' % (path,)) |
|
504 | 504 | if not self.ask_ok(q): |
|
505 | 505 | log.error('Canceled by user') |
|
506 | 506 | sys.exit(-1) |
|
507 | 507 | |
|
508 | 508 | if retries == 0: |
|
509 | 509 | sys.exit('max retries reached') |
|
510 | 510 | if not path_ok: |
|
511 | 511 | retries -= 1 |
|
512 | 512 | return self.config_prompt(test_repo_path, retries) |
|
513 | 513 | |
|
514 | 514 | real_path = os.path.normpath(os.path.realpath(path)) |
|
515 | 515 | |
|
516 | 516 | if real_path != os.path.normpath(path): |
|
517 | 517 | q = ('Path looks like a symlink, RhodeCode Enterprise will store ' |
|
518 | 518 | 'given path as %s ? [y/n]') % (real_path,) |
|
519 | 519 | if not self.ask_ok(q): |
|
520 | 520 | log.error('Canceled by user') |
|
521 | 521 | sys.exit(-1) |
|
522 | 522 | |
|
523 | 523 | return real_path |
|
524 | 524 | |
|
525 | 525 | def create_settings(self, path): |
|
526 | 526 | |
|
527 | 527 | self.create_ui_settings(path) |
|
528 | 528 | |
|
529 | 529 | ui_config = [ |
|
530 | 530 | ('web', 'push_ssl', 'False'), |
|
531 | 531 | ('web', 'allow_archive', 'gz zip bz2'), |
|
532 | 532 | ('web', 'allow_push', '*'), |
|
533 | 533 | ('web', 'baseurl', '/'), |
|
534 | 534 | ('paths', '/', path), |
|
535 | 535 | ('phases', 'publish', 'True') |
|
536 | 536 | ] |
|
537 | 537 | for section, key, value in ui_config: |
|
538 | 538 | ui_conf = RhodeCodeUi() |
|
539 | 539 | setattr(ui_conf, 'ui_section', section) |
|
540 | 540 | setattr(ui_conf, 'ui_key', key) |
|
541 | 541 | setattr(ui_conf, 'ui_value', value) |
|
542 | 542 | self.sa.add(ui_conf) |
|
543 | 543 | |
|
544 | 544 | # rhodecode app settings |
|
545 | 545 | settings = [ |
|
546 | 546 | ('realm', 'RhodeCode', 'unicode'), |
|
547 | 547 | ('title', '', 'unicode'), |
|
548 | 548 | ('pre_code', '', 'unicode'), |
|
549 | 549 | ('post_code', '', 'unicode'), |
|
550 | 550 | ('show_public_icon', True, 'bool'), |
|
551 | 551 | ('show_private_icon', True, 'bool'), |
|
552 | 552 | ('stylify_metatags', False, 'bool'), |
|
553 | 553 | ('dashboard_items', 100, 'int'), |
|
554 | 554 | ('admin_grid_items', 25, 'int'), |
|
555 | 555 | ('show_version', True, 'bool'), |
|
556 | 556 | ('use_gravatar', False, 'bool'), |
|
557 | 557 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), |
|
558 | 558 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), |
|
559 | 559 | ('support_url', '', 'unicode'), |
|
560 | 560 | ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'), |
|
561 | 561 | ('show_revision_number', True, 'bool'), |
|
562 | 562 | ('show_sha_length', 12, 'int'), |
|
563 | 563 | ] |
|
564 | 564 | |
|
565 | 565 | for key, val, type_ in settings: |
|
566 | 566 | sett = RhodeCodeSetting(key, val, type_) |
|
567 | 567 | self.sa.add(sett) |
|
568 | 568 | |
|
569 | 569 | self.create_auth_plugin_options() |
|
570 | 570 | self.create_default_options() |
|
571 | 571 | |
|
572 | 572 | log.info('created ui config') |
|
573 | 573 | |
|
574 | 574 | def create_user(self, username, password, email='', admin=False, |
|
575 | 575 | strict_creation_check=True, api_key=None): |
|
576 |
log.info('creating user `%s`' |
|
|
576 | log.info('creating user `%s`', username) | |
|
577 | 577 | user = UserModel().create_or_update( |
|
578 | 578 | username, password, email, firstname=u'RhodeCode', lastname=u'Admin', |
|
579 | 579 | active=True, admin=admin, extern_type="rhodecode", |
|
580 | 580 | strict_creation_check=strict_creation_check) |
|
581 | 581 | |
|
582 | 582 | if api_key: |
|
583 | 583 | log.info('setting a new default auth token for user `%s`', username) |
|
584 | 584 | UserModel().add_auth_token( |
|
585 | 585 | user=user, lifetime_minutes=-1, |
|
586 | 586 | role=UserModel.auth_token_role.ROLE_ALL, |
|
587 | 587 | description=u'BUILTIN TOKEN') |
|
588 | 588 | |
|
589 | 589 | def create_default_user(self): |
|
590 | 590 | log.info('creating default user') |
|
591 | 591 | # create default user for handling default permissions. |
|
592 | 592 | user = UserModel().create_or_update(username=User.DEFAULT_USER, |
|
593 | 593 | password=str(uuid.uuid1())[:20], |
|
594 | 594 | email=User.DEFAULT_USER_EMAIL, |
|
595 | 595 | firstname=u'Anonymous', |
|
596 | 596 | lastname=u'User', |
|
597 | 597 | strict_creation_check=False) |
|
598 | 598 | # based on configuration options activate/de-activate this user which |
|
599 | 599 | # controlls anonymous access |
|
600 | 600 | if self.cli_args.get('public_access') is False: |
|
601 | 601 | log.info('Public access disabled') |
|
602 | 602 | user.active = False |
|
603 | 603 | Session().add(user) |
|
604 | 604 | Session().commit() |
|
605 | 605 | |
|
606 | 606 | def create_permissions(self): |
|
607 | 607 | """ |
|
608 | 608 | Creates all permissions defined in the system |
|
609 | 609 | """ |
|
610 | 610 | # module.(access|create|change|delete)_[name] |
|
611 | 611 | # module.(none|read|write|admin) |
|
612 | 612 | log.info('creating permissions') |
|
613 | 613 | PermissionModel(self.sa).create_permissions() |
|
614 | 614 | |
|
615 | 615 | def populate_default_permissions(self): |
|
616 | 616 | """ |
|
617 | 617 | Populate default permissions. It will create only the default |
|
618 | 618 | permissions that are missing, and not alter already defined ones |
|
619 | 619 | """ |
|
620 | 620 | log.info('creating default user permissions') |
|
621 | 621 | PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER) |
@@ -1,100 +1,100 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Script to migrate repository from sqlalchemy <= 0.4.4 to the new |
|
3 | 3 | repository schema. This shouldn't use any other migrate modules, so |
|
4 | 4 | that it can work in any version. |
|
5 | 5 | """ |
|
6 | 6 | |
|
7 | 7 | import os |
|
8 | 8 | import sys |
|
9 | 9 | import logging |
|
10 | 10 | |
|
11 | 11 | log = logging.getLogger(__name__) |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | def usage(): |
|
15 | 15 | """Gives usage information.""" |
|
16 | 16 | print("""Usage: %(prog)s repository-to-migrate |
|
17 | 17 | |
|
18 | 18 | Upgrade your repository to the new flat format. |
|
19 | 19 | |
|
20 | 20 | NOTE: You should probably make a backup before running this. |
|
21 | 21 | """ % {'prog': sys.argv[0]}) |
|
22 | 22 | |
|
23 | 23 | sys.exit(1) |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def delete_file(filepath): |
|
27 | 27 | """Deletes a file and prints a message.""" |
|
28 |
log.info('Deleting file: %s' |
|
|
28 | log.info('Deleting file: %s', filepath) | |
|
29 | 29 | os.remove(filepath) |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def move_file(src, tgt): |
|
33 | 33 | """Moves a file and prints a message.""" |
|
34 |
log.info('Moving file %s to %s' |
|
|
34 | log.info('Moving file %s to %s', src, tgt) | |
|
35 | 35 | if os.path.exists(tgt): |
|
36 | 36 | raise Exception( |
|
37 | 37 | 'Cannot move file %s because target %s already exists' % \ |
|
38 | 38 | (src, tgt)) |
|
39 | 39 | os.rename(src, tgt) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def delete_directory(dirpath): |
|
43 | 43 | """Delete a directory and print a message.""" |
|
44 |
log.info('Deleting directory: %s' |
|
|
44 | log.info('Deleting directory: %s', dirpath) | |
|
45 | 45 | os.rmdir(dirpath) |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | def migrate_repository(repos): |
|
49 | 49 | """Does the actual migration to the new repository format.""" |
|
50 |
log.info('Migrating repository at: %s to new format' |
|
|
50 | log.info('Migrating repository at: %s to new format', repos) | |
|
51 | 51 | versions = '%s/versions' % repos |
|
52 | 52 | dirs = os.listdir(versions) |
|
53 | 53 | # Only use int's in list. |
|
54 | 54 | numdirs = [int(dirname) for dirname in dirs if dirname.isdigit()] |
|
55 | 55 | numdirs.sort() # Sort list. |
|
56 | 56 | for dirname in numdirs: |
|
57 | 57 | origdir = '%s/%s' % (versions, dirname) |
|
58 |
log.info('Working on directory: %s' |
|
|
58 | log.info('Working on directory: %s', origdir) | |
|
59 | 59 | files = os.listdir(origdir) |
|
60 | 60 | files.sort() |
|
61 | 61 | for filename in files: |
|
62 | 62 | # Delete compiled Python files. |
|
63 | 63 | if filename.endswith('.pyc') or filename.endswith('.pyo'): |
|
64 | 64 | delete_file('%s/%s' % (origdir, filename)) |
|
65 | 65 | |
|
66 | 66 | # Delete empty __init__.py files. |
|
67 | 67 | origfile = '%s/__init__.py' % origdir |
|
68 | 68 | if os.path.exists(origfile) and len(open(origfile).read()) == 0: |
|
69 | 69 | delete_file(origfile) |
|
70 | 70 | |
|
71 | 71 | # Move sql upgrade scripts. |
|
72 | 72 | if filename.endswith('.sql'): |
|
73 | 73 | version, dbms, operation = filename.split('.', 3)[0:3] |
|
74 | 74 | origfile = '%s/%s' % (origdir, filename) |
|
75 | 75 | # For instance: 2.postgres.upgrade.sql -> |
|
76 | 76 | # 002_postgres_upgrade.sql |
|
77 | 77 | tgtfile = '%s/%03d_%s_%s.sql' % ( |
|
78 | 78 | versions, int(version), dbms, operation) |
|
79 | 79 | move_file(origfile, tgtfile) |
|
80 | 80 | |
|
81 | 81 | # Move Python upgrade script. |
|
82 | 82 | pyfile = '%s.py' % dirname |
|
83 | 83 | pyfilepath = '%s/%s' % (origdir, pyfile) |
|
84 | 84 | if os.path.exists(pyfilepath): |
|
85 | 85 | tgtfile = '%s/%03d.py' % (versions, int(dirname)) |
|
86 | 86 | move_file(pyfilepath, tgtfile) |
|
87 | 87 | |
|
88 | 88 | # Try to remove directory. Will fail if it's not empty. |
|
89 | 89 | delete_directory(origdir) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def main(): |
|
93 | 93 | """Main function to be called when using this script.""" |
|
94 | 94 | if len(sys.argv) != 2: |
|
95 | 95 | usage() |
|
96 | 96 | migrate_repository(sys.argv[1]) |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | if __name__ == '__main__': |
|
100 | 100 | main() |
@@ -1,75 +1,75 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | A path/directory class. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | import os |
|
6 | 6 | import shutil |
|
7 | 7 | import logging |
|
8 | 8 | |
|
9 | 9 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
10 | 10 | from rhodecode.lib.dbmigrate.migrate.versioning.config import * |
|
11 | 11 | from rhodecode.lib.dbmigrate.migrate.versioning.util import KeyedInstance |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | log = logging.getLogger(__name__) |
|
15 | 15 | |
|
16 | 16 | class Pathed(KeyedInstance): |
|
17 | 17 | """ |
|
18 | 18 | A class associated with a path/directory tree. |
|
19 | 19 | |
|
20 | 20 | Only one instance of this class may exist for a particular file; |
|
21 | 21 | __new__ will return an existing instance if possible |
|
22 | 22 | """ |
|
23 | 23 | parent = None |
|
24 | 24 | |
|
25 | 25 | @classmethod |
|
26 | 26 | def _key(cls, path): |
|
27 | 27 | return str(path) |
|
28 | 28 | |
|
29 | 29 | def __init__(self, path): |
|
30 | 30 | self.path = path |
|
31 | 31 | if self.__class__.parent is not None: |
|
32 | 32 | self._init_parent(path) |
|
33 | 33 | |
|
34 | 34 | def _init_parent(self, path): |
|
35 | 35 | """Try to initialize this object's parent, if it has one""" |
|
36 | 36 | parent_path = self.__class__._parent_path(path) |
|
37 | 37 | self.parent = self.__class__.parent(parent_path) |
|
38 |
log.debug("Getting parent %r:%r" |
|
|
38 | log.debug("Getting parent %r:%r", self.__class__.parent, parent_path) | |
|
39 | 39 | self.parent._init_child(path, self) |
|
40 | 40 | |
|
41 | 41 | def _init_child(self, child, path): |
|
42 | 42 | """Run when a child of this object is initialized. |
|
43 | 43 | |
|
44 | 44 | Parameters: the child object; the path to this object (its |
|
45 | 45 | parent) |
|
46 | 46 | """ |
|
47 | 47 | |
|
48 | 48 | @classmethod |
|
49 | 49 | def _parent_path(cls, path): |
|
50 | 50 | """ |
|
51 | 51 | Fetch the path of this object's parent from this object's path. |
|
52 | 52 | """ |
|
53 | 53 | # os.path.dirname(), but strip directories like files (like |
|
54 | 54 | # unix basename) |
|
55 | 55 | # |
|
56 | 56 | # Treat directories like files... |
|
57 | 57 | if path[-1] == '/': |
|
58 | 58 | path = path[:-1] |
|
59 | 59 | ret = os.path.dirname(path) |
|
60 | 60 | return ret |
|
61 | 61 | |
|
62 | 62 | @classmethod |
|
63 | 63 | def require_notfound(cls, path): |
|
64 | 64 | """Ensures a given path does not already exist""" |
|
65 | 65 | if os.path.exists(path): |
|
66 | 66 | raise exceptions.PathFoundError(path) |
|
67 | 67 | |
|
68 | 68 | @classmethod |
|
69 | 69 | def require_found(cls, path): |
|
70 | 70 | """Ensures a given path already exists""" |
|
71 | 71 | if not os.path.exists(path): |
|
72 | 72 | raise exceptions.PathNotFoundError(path) |
|
73 | 73 | |
|
74 | 74 | def __str__(self): |
|
75 | 75 | return self.path |
@@ -1,243 +1,243 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | SQLAlchemy migrate repository management. |
|
3 | 3 | """ |
|
4 | 4 | import os |
|
5 | 5 | import shutil |
|
6 | 6 | import string |
|
7 | 7 | import logging |
|
8 | 8 | |
|
9 | 9 | from pkg_resources import resource_filename |
|
10 | 10 | from tempita import Template as TempitaTemplate |
|
11 | 11 | |
|
12 | 12 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
13 | 13 | from rhodecode.lib.dbmigrate.migrate.versioning import version, pathed, cfgparse |
|
14 | 14 | from rhodecode.lib.dbmigrate.migrate.versioning.template import Template |
|
15 | 15 | from rhodecode.lib.dbmigrate.migrate.versioning.config import * |
|
16 | 16 | |
|
17 | 17 | |
|
18 | 18 | log = logging.getLogger(__name__) |
|
19 | 19 | |
|
class Changeset(dict):
    """An ordered collection of changes to be applied to a database.

    Changesets are bound to a repository and manage a set of scripts
    from that repository.  Behaves like a dict for the most part; keys
    are kept ordered according to the step direction.
    """

    def __init__(self, start, *changes, **k):
        """
        Build a changeset beginning at version *start*; the step
        (default 1) must be stated explicitly via ``step=``.
        """
        self.step = k.pop('step', 1)
        self.start = version.VerNum(start)
        self.end = self.start
        for change in changes:
            self.add(change)

    def __iter__(self):
        # iterate (version, script) pairs in application order
        return iter(self.items())

    def keys(self):
        """
        In a series of upgrades x -> y, keys are version x.  Sorted
        ascending for upgrades, descending for downgrades.
        """
        versions = super(Changeset, self).keys()
        versions.sort(reverse=(self.step < 1))
        return versions

    def values(self):
        """Scripts, in the same order as :meth:`keys`."""
        return [self[key] for key in self.keys()]

    def items(self):
        return zip(self.keys(), self.values())

    def add(self, change):
        """Append *change* at the current end version and advance it."""
        at = self.end
        self.end += self.step
        self[at] = change

    def run(self, *p, **k):
        """Execute every script in the changeset, in order."""
        # loop variable renamed so it no longer shadows the ``version``
        # module imported at file level
        for ver, script in self:
            script.run(*p, **k)
|
67 | 67 | |
|
68 | 68 | |
|
class Repository(pathed.Pathed):
    """A project's change script repository."""

    # repository layout: a config file plus a directory of versions
    _config = 'migrate.cfg'
    _versions = 'versions'

    def __init__(self, path):
        """Load and verify the repository found at *path*.

        :raises: :exc:`InvalidRepositoryError` when *path* is not a
            valid repository layout.
        """
        log.debug('Loading repository %s...', path)
        self.verify(path)
        super(Repository, self).__init__(path)
        self.config = cfgparse.Config(os.path.join(self.path, self._config))
        self.versions = version.Collection(os.path.join(self.path,
                                                        self._versions))
        log.debug('Repository %s loaded successfully', path)
        log.debug('Config: %r', self.config.to_dict())

    @classmethod
    def verify(cls, path):
        """
        Ensure the target path is a valid repository.

        :raises: :exc:`InvalidRepositoryError <migrate.exceptions.InvalidRepositoryError>`
        """
        # Ensure the existence of required files
        try:
            cls.require_found(path)
            cls.require_found(os.path.join(path, cls._config))
            cls.require_found(os.path.join(path, cls._versions))
        except exceptions.PathNotFoundError:
            # fix: the exception instance was previously bound to an
            # unused variable
            raise exceptions.InvalidRepositoryError(path)

    @classmethod
    def prepare_config(cls, tmpl_dir, name, options=None):
        """
        Prepare a project configuration file for a new project.

        :param tmpl_dir: Path to Repository template
        :param name: Repository name
        :type tmpl_dir: string
        :type name: string
        :returns: Populated config file text
        """
        if options is None:
            options = {}
        # defaults for any option the caller did not supply
        options.setdefault('version_table', 'migrate_version')
        options.setdefault('repository_id', name)
        options.setdefault('required_dbs', [])
        options.setdefault('use_timestamp_numbering', False)

        with open(os.path.join(tmpl_dir, cls._config)) as f:
            tmpl = f.read()
        ret = TempitaTemplate(tmpl).substitute(options)

        # cleanup
        del options['__template_name__']

        return ret

    @classmethod
    def create(cls, path, name, **opts):
        """Create a repository at a specified path and return it."""
        cls.require_notfound(path)
        theme = opts.pop('templates_theme', None)
        t_path = opts.pop('templates_path', None)

        # Create repository
        tmpl_dir = Template(t_path).get_repository(theme=theme)
        shutil.copytree(tmpl_dir, path)

        # Edit config defaults
        config_text = cls.prepare_config(tmpl_dir, name, options=opts)
        with open(os.path.join(path, cls._config), 'w') as fd:
            fd.write(config_text)

        opts['repository_name'] = name

        # Create a management script.
        # fix: go through ``cls`` instead of the hard-coded Repository
        # class so subclasses can override create_manage_file
        manager = os.path.join(path, 'manage.py')
        cls.create_manage_file(manager, templates_theme=theme,
                               templates_path=t_path, **opts)

        return cls(path)

    def create_script(self, description, **k):
        """API to :meth:`migrate.versioning.version.Collection.create_new_python_version`"""
        k['use_timestamp_numbering'] = self.use_timestamp_numbering
        self.versions.create_new_python_version(description, **k)

    def create_script_sql(self, database, description, **k):
        """API to :meth:`migrate.versioning.version.Collection.create_new_sql_version`"""
        k['use_timestamp_numbering'] = self.use_timestamp_numbering
        self.versions.create_new_sql_version(database, description, **k)

    @property
    def latest(self):
        """API to :attr:`migrate.versioning.version.Collection.latest`"""
        return self.versions.latest

    @property
    def version_table(self):
        """Returns version_table name specified in config"""
        return self.config.get('db_settings', 'version_table')

    @property
    def id(self):
        """Returns repository id specified in config"""
        return self.config.get('db_settings', 'repository_id')

    @property
    def use_timestamp_numbering(self):
        """Returns use_timestamp_numbering specified in config"""
        if self.config.has_option('db_settings', 'use_timestamp_numbering'):
            return self.config.getboolean('db_settings', 'use_timestamp_numbering')
        return False

    def version(self, *p, **k):
        """API to :attr:`migrate.versioning.version.Collection.version`"""
        return self.versions.version(*p, **k)

    @classmethod
    def clear(cls):
        # TODO: deletes repo
        super(Repository, cls).clear()
        version.Collection.clear()

    def changeset(self, database, start, end=None):
        """Create a changeset to migrate this database from ver. start to end/latest.

        :param database: name of database to generate changeset
        :param start: version to start at
        :param end: version to end at (latest if None given)
        :type database: string
        :type start: int
        :type end: int
        :returns: :class:`Changeset instance <migration.versioning.repository.Changeset>`
        """
        start = version.VerNum(start)

        if end is None:
            end = self.latest
        else:
            end = version.VerNum(end)

        # Upgrades key each script on its *target* version (hence the
        # +1 range shift); downgrades walk the range backwards.
        if start <= end:
            step = 1
            range_mod = 1
            op = 'upgrade'
        else:
            step = -1
            range_mod = 0
            op = 'downgrade'

        versions = range(int(start) + range_mod, int(end) + range_mod, step)
        changes = [self.version(v).script(database, op) for v in versions]
        ret = Changeset(start, step=step, *changes)
        return ret

    @classmethod
    def create_manage_file(cls, file_, **opts):
        """Create a project management script (manage.py)

        :param file_: Destination file to be written
        :param opts: Options that are passed to :func:`migrate.versioning.shell.main`
        """
        mng_file = Template(opts.pop('templates_path', None))\
            .get_manage(theme=opts.pop('templates_theme', None))

        with open(mng_file) as f:
            tmpl = f.read()

        with open(file_, 'w') as fd:
            fd.write(TempitaTemplate(tmpl).substitute(opts))
@@ -1,56 +1,56 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | import logging |
|
4 | 4 | |
|
5 | 5 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
6 | 6 | from rhodecode.lib.dbmigrate.migrate.versioning.config import operations |
|
7 | 7 | from rhodecode.lib.dbmigrate.migrate.versioning import pathed |
|
8 | 8 | |
|
9 | 9 | |
|
10 | 10 | log = logging.getLogger(__name__) |
|
11 | 11 | |
|
class BaseScript(pathed.Pathed):
    """Base class for other types of scripts.
    All scripts have the following properties:

        source (script.source())
          The source code of the script
        version (script.version())
          The version number of the script
        operations (script.operations())
          The operations defined by the script: upgrade(), downgrade() or both.
          Returns a tuple of operations.
          Can also check for an operation with ex. script.operation(Script.ops.up)
    """ # TODO: sphinxfy this and implement it correctly

    def __init__(self, path):
        """Verify and load the script at *path*."""
        log.debug('Loading script %s...', path)
        self.verify(path)
        super(BaseScript, self).__init__(path)
        log.debug('Script %s loaded successfully', path)

    @classmethod
    def verify(cls, path):
        """Ensure this is a valid script
        This version simply ensures the script file's existence

        :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
        """
        try:
            cls.require_found(path)
        except exceptions.PathNotFoundError:
            # fix: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; require_found() only raises
            # PathNotFoundError, so catch exactly that.
            raise exceptions.InvalidScriptError(path)

    def source(self):
        """:returns: source code of the script.
        :rtype: string
        """
        with open(self.path) as fd:
            return fd.read()

    def run(self, engine):
        """Core of each BaseScript subclass.
        This method executes the script.
        """
        raise NotImplementedError()
@@ -1,1043 +1,1043 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import logging |
|
23 | 23 | import datetime |
|
24 | 24 | import traceback |
|
25 | 25 | from datetime import date |
|
26 | 26 | |
|
27 | 27 | from sqlalchemy import * |
|
28 | 28 | from sqlalchemy.ext.hybrid import hybrid_property |
|
29 | 29 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
30 | 30 | from beaker.cache import cache_region, region_invalidate |
|
31 | 31 | |
|
32 | 32 | from rhodecode.lib.vcs import get_backend |
|
33 | 33 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
34 | 34 | from rhodecode.lib.vcs.exceptions import VCSError |
|
35 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 36 | from rhodecode.lib.auth import generate_auth_token |
|
37 | 37 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, safe_unicode |
|
38 | 38 | from rhodecode.lib.exceptions import UserGroupAssignedException |
|
39 | 39 | from rhodecode.lib.ext_json import json |
|
40 | 40 | |
|
41 | 41 | from rhodecode.model.meta import Base, Session |
|
42 | 42 | from rhodecode.lib.caching_query import FromCache |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | log = logging.getLogger(__name__) |
|
46 | 46 | |
|
47 | 47 | #============================================================================== |
|
48 | 48 | # BASE CLASSES |
|
49 | 49 | #============================================================================== |
|
50 | 50 | |
|
class ModelSerializer(json.JSONEncoder):
    """Simple JSON serializer.

    To customize an object for serialization, give it a ``__json__``
    method returning the dict to encode, for example::

        class Task(object):

            def __init__(self, name, value):
                self.name = name
                self.value = value

            def __json__(self):
                return dict(name=self.name,
                            value=self.value)
    """

    def default(self, obj):
        # Prefer the object's own serialization hook; otherwise defer
        # to the stock encoder (which raises TypeError).
        serializer = getattr(obj, '__json__', None)
        if serializer is not None:
            return serializer()
        return json.JSONEncoder.default(self, obj)
|
80 | 80 | |
|
class BaseModel(object):
    """Common query/serialization helpers shared by all model classes."""

    @classmethod
    def _get_keys(cls):
        """Return the column names of this model."""
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """Return a dict mapping column names to their current values."""
        return dict((key, getattr(self, key)) for key in self._get_keys())

    def get_appstruct(self):
        """Return (column name, value) tuples for this model's data."""
        return [(key, getattr(self, key),) for key in self._get_keys()]

    def populate_obj(self, populate_dict):
        """Copy matching entries from *populate_dict* onto this model."""
        for key in self._get_keys():
            if key in populate_dict:
                setattr(self, key, populate_dict[key])

    @classmethod
    def query(cls):
        return Session.query(cls)

    @classmethod
    def get(cls, id_):
        # NOTE: returns None for a falsy id instead of querying
        if id_:
            return cls.query().get(id_)

    @classmethod
    def getAll(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Delete the row with primary key *id_* and commit."""
        obj = cls.query().get(id_)
        Session.delete(obj)
        Session.commit()
|
134 | 134 | |
|
135 | 135 | |
|
class RhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None)

    def __init__(self, k='', v=''):
        self.app_settings_name = k
        self.app_settings_value = v

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must always be unicode (see the setter below)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        # NOTE(review): this compares the *value* to 'ldap_active'; it
        # looks like the intent was to coerce the ldap_active setting
        # to bool (i.e. compare app_settings_name) -- confirm against
        # callers before changing. Behavior kept as-is.
        if v == 'ldap_active':
            v = str2bool(v)
        return v

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.app_settings_name, self.app_settings_value)

    @classmethod
    def get_by_name(cls, ldap_key):
        return cls.query()\
            .filter(cls.app_settings_name == ldap_key).scalar()

    @classmethod
    def get_app_settings(cls, cache=False):
        """Return all settings as a 'rhodecode_'-prefixed dict."""
        ret = cls.query()

        if cache:
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))

        if not ret:
            raise Exception('Could not get application settings !')

        settings = {}
        for row in ret:
            settings['rhodecode_' + row.app_settings_name] = \
                row.app_settings_value

        return settings

    @classmethod
    def get_ldap_settings(cls, cache=False):
        """Return all 'ldap_'-prefixed settings as a plain dict."""
        rows = cls.query()\
            .filter(cls.app_settings_name.startswith('ldap_')).all()
        fd = {}
        for row in rows:
            fd[row.app_settings_name] = row.app_settings_value

        return fd
|
205 | 205 | |
|
206 | 206 | |
|
class RhodeCodeUi(Base, BaseModel):
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})

    # keys of the built-in VCS hooks
    HOOK_REPO_SIZE = 'changegroup.repo_size'
    HOOK_PUSH = 'pretxnchangegroup.push_logger'
    HOOK_PULL = 'preoutgoing.pull_logger'

    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)

    @classmethod
    def get_by_key(cls, key):
        """Return a *query* (not a result) filtered on ui_key == key."""
        return cls.query().filter(cls.ui_key == key)

    @classmethod
    def get_builtin_hooks(cls):
        """All rows whose key is one of the built-in hooks."""
        builtin = [cls.HOOK_REPO_SIZE, cls.HOOK_PUSH, cls.HOOK_PULL]
        return cls.query().filter(cls.ui_key.in_(builtin)).all()

    @classmethod
    def get_custom_hooks(cls):
        """All 'hooks'-section rows that are not built-in hooks."""
        builtin = [cls.HOOK_REPO_SIZE, cls.HOOK_PUSH, cls.HOOK_PULL]
        q = cls.query().filter(~cls.ui_key.in_(builtin))
        q = q.filter(cls.ui_section == 'hooks')
        return q.all()

    @classmethod
    def create_or_update_hook(cls, key, val):
        """Upsert an active 'hooks'-section row for *key* and commit."""
        new_ui = cls.get_by_key(key).scalar() or cls()
        new_ui.ui_section = 'hooks'
        new_ui.ui_active = True
        new_ui.ui_key = key
        new_ui.ui_value = val

        Session.add(new_ui)
        Session.commit()
|
252 | 252 | |
|
253 | 253 | |
|
class User(Base, BaseModel):
    __tablename__ = 'users'
    __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=None)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("name", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)

    user_log = relationship('UserLog', cascade='all')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    @property
    def full_contact(self):
        """'name lastname <email>' style contact string."""
        return '%s %s <%s>' % (self.name, self.lastname, self.email)

    @property
    def short_contact(self):
        """'name lastname' without the email address."""
        return '%s %s' % (self.name, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    def __repr__(self):
        try:
            return "<%s('id:%s:%s')>" % (self.__class__.__name__,
                                         self.user_id, self.username)
        except Exception:
            # fix: narrowed from a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt
            return self.__class__.__name__

    @classmethod
    def get_by_username(cls, username, case_insensitive=False):
        """Look a user up by username, optionally case-insensitively."""
        if case_insensitive:
            return Session.query(cls).filter(cls.username.ilike(username)).scalar()
        else:
            return Session.query(cls).filter(cls.username == username).scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token):
        # .one() raises when the token is unknown or not unique
        return cls.query().filter(cls.api_key == auth_token).one()

    def update_lastlogin(self):
        """Update user lastlogin"""

        self.last_login = datetime.datetime.now()
        Session.add(self)
        Session.commit()
        log.debug('updated user %s lastlogin', self.username)

    @classmethod
    def create(cls, form_data):
        """Create a user from *form_data*; rolls back and re-raises on
        failure. The password field is hashed before being stored."""
        from rhodecode.lib.auth import get_crypt_password

        try:
            new_user = cls()
            for k, v in form_data.items():
                if k == 'password':
                    v = get_crypt_password(v)
                setattr(new_user, k, v)

            new_user.api_key = generate_auth_token(form_data['username'])
            Session.add(new_user)
            Session.commit()
            return new_user
        except Exception:
            # fix: narrowed from a bare ``except:``; still logs the
            # traceback and rolls back before re-raising
            log.error(traceback.format_exc())
            Session.rollback()
            raise
|
335 | 335 | |
|
class UserLog(Base, BaseModel):
    """Audit-log row: which user did what action, where and when."""
    __tablename__ = 'user_logs'
    __table_args__ = {'extend_existing':True}

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", String(1200000), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    @property
    def action_as_day(self):
        """The action timestamp truncated to a ``datetime.date``."""
        year, month, day = self.action_date.timetuple()[:3]
        return date(year, month, day)

    user = relationship('User')
    repository = relationship('Repository')
|
353 | 353 | |
|
354 | 354 | |
|
class UserGroup(Base, BaseModel):
    __tablename__ = 'users_groups'
    __table_args__ = {'extend_existing':True}

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")

    def __repr__(self):
        return '<userGroup(%s)>' % (self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look a group up by name, optionally cached/case-insensitive."""
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.users_group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.users_group_name == group_name)
        if cache:
            gr = gr.options(FromCache("sql_cache_short",
                                      "get_user_%s" % group_name))
        return gr.scalar()

    @classmethod
    def get(cls, users_group_id, cache=False):
        """Fetch a group by id, optionally through the SQL cache."""
        users_group = cls.query()
        if cache:
            users_group = users_group.options(FromCache("sql_cache_short",
                                    "get_users_group_%s" % users_group_id))
        return users_group.get(users_group_id)

    @classmethod
    def create(cls, form_data):
        """Create a group from *form_data*; rolls back on failure."""
        try:
            new_user_group = cls()
            for k, v in form_data.items():
                setattr(new_user_group, k, v)

            Session.add(new_user_group)
            Session.commit()
            return new_user_group
        except Exception:
            # fix: narrowed from a bare ``except:`` which also caught
            # SystemExit/KeyboardInterrupt
            log.error(traceback.format_exc())
            Session.rollback()
            raise

    @classmethod
    def update(cls, users_group_id, form_data):
        """Update group attributes (including the member list) from
        *form_data*; rolls back and re-raises on failure."""
        try:
            users_group = cls.get(users_group_id, cache=False)

            for k, v in form_data.items():
                if k == 'users_group_members':
                    # replace the membership list wholesale
                    users_group.members = []
                    Session.flush()
                    members_list = []
                    if v:
                        # a single id may arrive as a bare string
                        v = [v] if isinstance(v, basestring) else v
                        for u_id in set(v):
                            member = UserGroupMember(users_group_id, u_id)
                            members_list.append(member)
                    setattr(users_group, 'members', members_list)
                # NOTE(review): this setattr also runs for the
                # 'users_group_members' key, re-setting that attribute
                # to the raw id list right after members was assigned
                # above -- kept as-is; confirm intent before changing
                setattr(users_group, k, v)

            Session.add(users_group)
            Session.commit()
        except Exception:
            # fix: narrowed from a bare ``except:``
            log.error(traceback.format_exc())
            Session.rollback()
            raise

    @classmethod
    def delete(cls, user_group_id):
        """Delete a group, refusing while it still has repository
        permissions attached.

        :raises UserGroupAssignedException: when the group is still
            assigned to at least one repository.
        """
        try:

            # check if this group is not assigned to repo
            assigned_groups = UserGroupRepoToPerm.query()\
                .filter(UserGroupRepoToPerm.users_group_id ==
                        user_group_id).all()

            if assigned_groups:
                raise UserGroupAssignedException(
                    'UserGroup assigned to %s' % assigned_groups)

            users_group = cls.get(user_group_id, cache=False)
            Session.delete(users_group)
            Session.commit()
        except Exception:
            # fix: narrowed from a bare ``except:``; the
            # UserGroupAssignedException above is still logged, rolled
            # back and re-raised exactly as before
            log.error(traceback.format_exc())
            Session.rollback()
            raise
|
450 | 450 | |
|
451 | 451 | class UserGroupMember(Base, BaseModel): |
|
452 | 452 | __tablename__ = 'users_groups_members' |
|
453 | 453 | __table_args__ = {'extend_existing':True} |
|
454 | 454 | |
|
455 | 455 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
456 | 456 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
457 | 457 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
458 | 458 | |
|
459 | 459 | user = relationship('User', lazy='joined') |
|
460 | 460 | users_group = relationship('UserGroup') |
|
461 | 461 | |
|
462 | 462 | def __init__(self, gr_id='', u_id=''): |
|
463 | 463 | self.users_group_id = gr_id |
|
464 | 464 | self.user_id = u_id |
|
465 | 465 | |
|
466 | 466 | @staticmethod |
|
467 | 467 | def add_user_to_group(group, user): |
|
468 | 468 | ugm = UserGroupMember() |
|
469 | 469 | ugm.users_group = group |
|
470 | 470 | ugm.user = user |
|
471 | 471 | Session.add(ugm) |
|
472 | 472 | Session.commit() |
|
473 | 473 | return ugm |
|
474 | 474 | |
|
475 | 475 | class Repository(Base, BaseModel): |
|
476 | 476 | __tablename__ = 'repositories' |
|
477 | 477 | __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},) |
|
478 | 478 | |
|
479 | 479 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
480 | 480 | repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None) |
|
481 | 481 | clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None) |
|
482 | 482 | repo_type = Column("repo_type", String(255), nullable=False, unique=False, default='hg') |
|
483 | 483 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
484 | 484 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
485 | 485 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
486 | 486 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
487 | 487 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
488 | 488 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
489 | 489 | |
|
490 | 490 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
491 | 491 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
492 | 492 | |
|
493 | 493 | |
|
494 | 494 | user = relationship('User') |
|
495 | 495 | fork = relationship('Repository', remote_side=repo_id) |
|
496 | 496 | group = relationship('RepoGroup') |
|
497 | 497 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
498 | 498 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
499 | 499 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
500 | 500 | |
|
501 | 501 | followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all') |
|
502 | 502 | |
|
503 | 503 | logs = relationship('UserLog', cascade='all') |
|
504 | 504 | |
|
505 | 505 | def __repr__(self): |
|
506 | 506 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
507 | 507 | self.repo_id, self.repo_name) |
|
508 | 508 | |
|
509 | 509 | @classmethod |
|
510 | 510 | def url_sep(cls): |
|
511 | 511 | return '/' |
|
512 | 512 | |
|
513 | 513 | @classmethod |
|
514 | 514 | def get_by_repo_name(cls, repo_name): |
|
515 | 515 | q = Session.query(cls).filter(cls.repo_name == repo_name) |
|
516 | 516 | q = q.options(joinedload(Repository.fork))\ |
|
517 | 517 | .options(joinedload(Repository.user))\ |
|
518 | 518 | .options(joinedload(Repository.group)) |
|
519 | 519 | return q.one() |
|
520 | 520 | |
|
521 | 521 | @classmethod |
|
522 | 522 | def get_repo_forks(cls, repo_id): |
|
523 | 523 | return cls.query().filter(Repository.fork_id == repo_id) |
|
524 | 524 | |
|
525 | 525 | @classmethod |
|
526 | 526 | def base_path(cls): |
|
527 | 527 | """ |
|
528 | 528 | Returns base path when all repos are stored |
|
529 | 529 | |
|
530 | 530 | :param cls: |
|
531 | 531 | """ |
|
532 | 532 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
533 | 533 | cls.url_sep()) |
|
534 | 534 | q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
535 | 535 | return q.one().ui_value |
|
536 | 536 | |
|
537 | 537 | @property |
|
538 | 538 | def just_name(self): |
|
539 | 539 | return self.repo_name.split(Repository.url_sep())[-1] |
|
540 | 540 | |
|
541 | 541 | @property |
|
542 | 542 | def groups_with_parents(self): |
|
543 | 543 | groups = [] |
|
544 | 544 | if self.group is None: |
|
545 | 545 | return groups |
|
546 | 546 | |
|
547 | 547 | cur_gr = self.group |
|
548 | 548 | groups.insert(0, cur_gr) |
|
549 | 549 | while 1: |
|
550 | 550 | gr = getattr(cur_gr, 'parent_group', None) |
|
551 | 551 | cur_gr = cur_gr.parent_group |
|
552 | 552 | if gr is None: |
|
553 | 553 | break |
|
554 | 554 | groups.insert(0, gr) |
|
555 | 555 | |
|
556 | 556 | return groups |
|
557 | 557 | |
|
558 | 558 | @property |
|
559 | 559 | def groups_and_repo(self): |
|
560 | 560 | return self.groups_with_parents, self.just_name |
|
561 | 561 | |
|
562 | 562 | @LazyProperty |
|
563 | 563 | def repo_path(self): |
|
564 | 564 | """ |
|
565 | 565 | Returns base full path for that repository means where it actually |
|
566 | 566 | exists on a filesystem |
|
567 | 567 | """ |
|
568 | 568 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
569 | 569 | Repository.url_sep()) |
|
570 | 570 | q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
571 | 571 | return q.one().ui_value |
|
572 | 572 | |
|
573 | 573 | @property |
|
574 | 574 | def repo_full_path(self): |
|
575 | 575 | p = [self.repo_path] |
|
576 | 576 | # we need to split the name by / since this is how we store the |
|
577 | 577 | # names in the database, but that eventually needs to be converted |
|
578 | 578 | # into a valid system path |
|
579 | 579 | p += self.repo_name.split(Repository.url_sep()) |
|
580 | 580 | return os.path.join(*p) |
|
581 | 581 | |
|
582 | 582 | def get_new_name(self, repo_name): |
|
583 | 583 | """ |
|
584 | 584 | returns new full repository name based on assigned group and new new |
|
585 | 585 | |
|
586 | 586 | :param group_name: |
|
587 | 587 | """ |
|
588 | 588 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
589 | 589 | return Repository.url_sep().join(path_prefix + [repo_name]) |
|
590 | 590 | |
|
591 | 591 | @property |
|
592 | 592 | def _config(self): |
|
593 | 593 | """ |
|
594 | 594 | Returns db based config object. |
|
595 | 595 | """ |
|
596 | 596 | from rhodecode.lib.utils import make_db_config |
|
597 | 597 | return make_db_config(clear_session=False) |
|
598 | 598 | |
|
599 | 599 | @classmethod |
|
600 | 600 | def is_valid(cls, repo_name): |
|
601 | 601 | """ |
|
602 | 602 | returns True if given repo name is a valid filesystem repository |
|
603 | 603 | |
|
604 | 604 | :param cls: |
|
605 | 605 | :param repo_name: |
|
606 | 606 | """ |
|
607 | 607 | from rhodecode.lib.utils import is_valid_repo |
|
608 | 608 | |
|
609 | 609 | return is_valid_repo(repo_name, cls.base_path()) |
|
610 | 610 | |
|
611 | 611 | |
|
612 | 612 | #========================================================================== |
|
613 | 613 | # SCM PROPERTIES |
|
614 | 614 | #========================================================================== |
|
615 | 615 | |
|
616 | 616 | def get_commit(self, rev): |
|
617 | 617 | return get_commit_safe(self.scm_instance, rev) |
|
618 | 618 | |
|
619 | 619 | @property |
|
620 | 620 | def tip(self): |
|
621 | 621 | return self.get_commit('tip') |
|
622 | 622 | |
|
623 | 623 | @property |
|
624 | 624 | def author(self): |
|
625 | 625 | return self.tip.author |
|
626 | 626 | |
|
627 | 627 | @property |
|
628 | 628 | def last_change(self): |
|
629 | 629 | return self.scm_instance.last_change |
|
630 | 630 | |
|
631 | 631 | #========================================================================== |
|
632 | 632 | # SCM CACHE INSTANCE |
|
633 | 633 | #========================================================================== |
|
634 | 634 | |
|
635 | 635 | @property |
|
636 | 636 | def invalidate(self): |
|
637 | 637 | return CacheInvalidation.invalidate(self.repo_name) |
|
638 | 638 | |
|
639 | 639 | def set_invalidate(self): |
|
640 | 640 | """ |
|
641 | 641 | set a cache for invalidation for this instance |
|
642 | 642 | """ |
|
643 | 643 | CacheInvalidation.set_invalidate(self.repo_name) |
|
644 | 644 | |
|
645 | 645 | @LazyProperty |
|
646 | 646 | def scm_instance(self): |
|
647 | 647 | return self.__get_instance() |
|
648 | 648 | |
|
649 | 649 | @property |
|
650 | 650 | def scm_instance_cached(self): |
|
651 | 651 | return self.__get_instance() |
|
652 | 652 | |
|
653 | 653 | def __get_instance(self): |
|
654 | 654 | |
|
655 | 655 | repo_full_path = self.repo_full_path |
|
656 | 656 | |
|
657 | 657 | try: |
|
658 | 658 | alias = get_scm(repo_full_path)[0] |
|
659 |
log.debug('Creating instance of %s repository' |
|
|
659 | log.debug('Creating instance of %s repository', alias) | |
|
660 | 660 | backend = get_backend(alias) |
|
661 | 661 | except VCSError: |
|
662 | 662 | log.error(traceback.format_exc()) |
|
663 | 663 | log.error('Perhaps this repository is in db and not in ' |
|
664 | 664 | 'filesystem run rescan repositories with ' |
|
665 | 665 | '"destroy old data " option from admin panel') |
|
666 | 666 | return |
|
667 | 667 | |
|
668 | 668 | if alias == 'hg': |
|
669 | 669 | |
|
670 | 670 | repo = backend(safe_str(repo_full_path), create=False, |
|
671 | 671 | config=self._config) |
|
672 | 672 | |
|
673 | 673 | else: |
|
674 | 674 | repo = backend(repo_full_path, create=False) |
|
675 | 675 | |
|
676 | 676 | return repo |
|
677 | 677 | |
|
678 | 678 | |
|
679 | 679 | class Group(Base, BaseModel): |
|
680 | 680 | __tablename__ = 'groups' |
|
681 | 681 | __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'), |
|
682 | 682 | CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},) |
|
683 | 683 | __mapper_args__ = {'order_by':'group_name'} |
|
684 | 684 | |
|
685 | 685 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
686 | 686 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
687 | 687 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
688 | 688 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
689 | 689 | |
|
690 | 690 | parent_group = relationship('Group', remote_side=group_id) |
|
691 | 691 | |
|
692 | 692 | def __init__(self, group_name='', parent_group=None): |
|
693 | 693 | self.group_name = group_name |
|
694 | 694 | self.parent_group = parent_group |
|
695 | 695 | |
|
696 | 696 | def __repr__(self): |
|
697 | 697 | return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
698 | 698 | self.group_name) |
|
699 | 699 | |
|
700 | 700 | @classmethod |
|
701 | 701 | def url_sep(cls): |
|
702 | 702 | return '/' |
|
703 | 703 | |
|
704 | 704 | @classmethod |
|
705 | 705 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
706 | 706 | if case_insensitive: |
|
707 | 707 | gr = cls.query()\ |
|
708 | 708 | .filter(cls.group_name.ilike(group_name)) |
|
709 | 709 | else: |
|
710 | 710 | gr = cls.query()\ |
|
711 | 711 | .filter(cls.group_name == group_name) |
|
712 | 712 | if cache: |
|
713 | 713 | gr = gr.options(FromCache("sql_cache_short", |
|
714 | 714 | "get_group_%s" % group_name)) |
|
715 | 715 | return gr.scalar() |
|
716 | 716 | |
|
717 | 717 | @property |
|
718 | 718 | def parents(self): |
|
719 | 719 | parents_recursion_limit = 5 |
|
720 | 720 | groups = [] |
|
721 | 721 | if self.parent_group is None: |
|
722 | 722 | return groups |
|
723 | 723 | cur_gr = self.parent_group |
|
724 | 724 | groups.insert(0, cur_gr) |
|
725 | 725 | cnt = 0 |
|
726 | 726 | while 1: |
|
727 | 727 | cnt += 1 |
|
728 | 728 | gr = getattr(cur_gr, 'parent_group', None) |
|
729 | 729 | cur_gr = cur_gr.parent_group |
|
730 | 730 | if gr is None: |
|
731 | 731 | break |
|
732 | 732 | if cnt == parents_recursion_limit: |
|
733 | 733 | # this will prevent accidental infinit loops |
|
734 |
log.error('group nested more than %s' |
|
|
734 | log.error('group nested more than %s', | |
|
735 | 735 | parents_recursion_limit) |
|
736 | 736 | break |
|
737 | 737 | |
|
738 | 738 | groups.insert(0, gr) |
|
739 | 739 | return groups |
|
740 | 740 | |
|
741 | 741 | @property |
|
742 | 742 | def children(self): |
|
743 | 743 | return Group.query().filter(Group.parent_group == self) |
|
744 | 744 | |
|
745 | 745 | @property |
|
746 | 746 | def name(self): |
|
747 | 747 | return self.group_name.split(Group.url_sep())[-1] |
|
748 | 748 | |
|
749 | 749 | @property |
|
750 | 750 | def full_path(self): |
|
751 | 751 | return self.group_name |
|
752 | 752 | |
|
753 | 753 | @property |
|
754 | 754 | def full_path_splitted(self): |
|
755 | 755 | return self.group_name.split(Group.url_sep()) |
|
756 | 756 | |
|
757 | 757 | @property |
|
758 | 758 | def repositories(self): |
|
759 | 759 | return Repository.query().filter(Repository.group == self) |
|
760 | 760 | |
|
761 | 761 | @property |
|
762 | 762 | def repositories_recursive_count(self): |
|
763 | 763 | cnt = self.repositories.count() |
|
764 | 764 | |
|
765 | 765 | def children_count(group): |
|
766 | 766 | cnt = 0 |
|
767 | 767 | for child in group.children: |
|
768 | 768 | cnt += child.repositories.count() |
|
769 | 769 | cnt += children_count(child) |
|
770 | 770 | return cnt |
|
771 | 771 | |
|
772 | 772 | return cnt + children_count(self) |
|
773 | 773 | |
|
774 | 774 | |
|
775 | 775 | def get_new_name(self, group_name): |
|
776 | 776 | """ |
|
777 | 777 | returns new full group name based on parent and new name |
|
778 | 778 | |
|
779 | 779 | :param group_name: |
|
780 | 780 | """ |
|
781 | 781 | path_prefix = (self.parent_group.full_path_splitted if |
|
782 | 782 | self.parent_group else []) |
|
783 | 783 | return Group.url_sep().join(path_prefix + [group_name]) |
|
784 | 784 | |
|
785 | 785 | |
|
786 | 786 | class Permission(Base, BaseModel): |
|
787 | 787 | __tablename__ = 'permissions' |
|
788 | 788 | __table_args__ = {'extend_existing':True} |
|
789 | 789 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
790 | 790 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
791 | 791 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
792 | 792 | |
|
793 | 793 | def __repr__(self): |
|
794 | 794 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
795 | 795 | self.permission_id, self.permission_name) |
|
796 | 796 | |
|
797 | 797 | @classmethod |
|
798 | 798 | def get_by_key(cls, key): |
|
799 | 799 | return cls.query().filter(cls.permission_name == key).scalar() |
|
800 | 800 | |
|
801 | 801 | class UserRepoToPerm(Base, BaseModel): |
|
802 | 802 | __tablename__ = 'repo_to_perm' |
|
803 | 803 | __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True}) |
|
804 | 804 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
805 | 805 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
806 | 806 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
807 | 807 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
808 | 808 | |
|
809 | 809 | user = relationship('User') |
|
810 | 810 | permission = relationship('Permission') |
|
811 | 811 | repository = relationship('Repository') |
|
812 | 812 | |
|
813 | 813 | class UserToPerm(Base, BaseModel): |
|
814 | 814 | __tablename__ = 'user_to_perm' |
|
815 | 815 | __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True}) |
|
816 | 816 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
817 | 817 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
818 | 818 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
819 | 819 | |
|
820 | 820 | user = relationship('User') |
|
821 | 821 | permission = relationship('Permission') |
|
822 | 822 | |
|
823 | 823 | @classmethod |
|
824 | 824 | def has_perm(cls, user_id, perm): |
|
825 | 825 | if not isinstance(perm, Permission): |
|
826 | 826 | raise Exception('perm needs to be an instance of Permission class') |
|
827 | 827 | |
|
828 | 828 | return cls.query().filter(cls.user_id == user_id)\ |
|
829 | 829 | .filter(cls.permission == perm).scalar() is not None |
|
830 | 830 | |
|
831 | 831 | @classmethod |
|
832 | 832 | def grant_perm(cls, user_id, perm): |
|
833 | 833 | if not isinstance(perm, Permission): |
|
834 | 834 | raise Exception('perm needs to be an instance of Permission class') |
|
835 | 835 | |
|
836 | 836 | new = cls() |
|
837 | 837 | new.user_id = user_id |
|
838 | 838 | new.permission = perm |
|
839 | 839 | try: |
|
840 | 840 | Session.add(new) |
|
841 | 841 | Session.commit() |
|
842 | 842 | except: |
|
843 | 843 | Session.rollback() |
|
844 | 844 | |
|
845 | 845 | |
|
846 | 846 | @classmethod |
|
847 | 847 | def revoke_perm(cls, user_id, perm): |
|
848 | 848 | if not isinstance(perm, Permission): |
|
849 | 849 | raise Exception('perm needs to be an instance of Permission class') |
|
850 | 850 | |
|
851 | 851 | try: |
|
852 | 852 | cls.query().filter(cls.user_id == user_id) \ |
|
853 | 853 | .filter(cls.permission == perm).delete() |
|
854 | 854 | Session.commit() |
|
855 | 855 | except: |
|
856 | 856 | Session.rollback() |
|
857 | 857 | |
|
858 | 858 | class UserGroupRepoToPerm(Base, BaseModel): |
|
859 | 859 | __tablename__ = 'users_group_repo_to_perm' |
|
860 | 860 | __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True}) |
|
861 | 861 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
862 | 862 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
863 | 863 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
864 | 864 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
865 | 865 | |
|
866 | 866 | users_group = relationship('UserGroup') |
|
867 | 867 | permission = relationship('Permission') |
|
868 | 868 | repository = relationship('Repository') |
|
869 | 869 | |
|
870 | 870 | def __repr__(self): |
|
871 | 871 | return '<userGroup:%s => %s >' % (self.users_group, self.repository) |
|
872 | 872 | |
|
873 | 873 | class UserGroupToPerm(Base, BaseModel): |
|
874 | 874 | __tablename__ = 'users_group_to_perm' |
|
875 | 875 | __table_args__ = {'extend_existing':True} |
|
876 | 876 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
877 | 877 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
878 | 878 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
879 | 879 | |
|
880 | 880 | users_group = relationship('UserGroup') |
|
881 | 881 | permission = relationship('Permission') |
|
882 | 882 | |
|
883 | 883 | |
|
884 | 884 | @classmethod |
|
885 | 885 | def has_perm(cls, users_group_id, perm): |
|
886 | 886 | if not isinstance(perm, Permission): |
|
887 | 887 | raise Exception('perm needs to be an instance of Permission class') |
|
888 | 888 | |
|
889 | 889 | return cls.query().filter(cls.users_group_id == |
|
890 | 890 | users_group_id)\ |
|
891 | 891 | .filter(cls.permission == perm)\ |
|
892 | 892 | .scalar() is not None |
|
893 | 893 | |
|
894 | 894 | @classmethod |
|
895 | 895 | def grant_perm(cls, users_group_id, perm): |
|
896 | 896 | if not isinstance(perm, Permission): |
|
897 | 897 | raise Exception('perm needs to be an instance of Permission class') |
|
898 | 898 | |
|
899 | 899 | new = cls() |
|
900 | 900 | new.users_group_id = users_group_id |
|
901 | 901 | new.permission = perm |
|
902 | 902 | try: |
|
903 | 903 | Session.add(new) |
|
904 | 904 | Session.commit() |
|
905 | 905 | except: |
|
906 | 906 | Session.rollback() |
|
907 | 907 | |
|
908 | 908 | |
|
909 | 909 | @classmethod |
|
910 | 910 | def revoke_perm(cls, users_group_id, perm): |
|
911 | 911 | if not isinstance(perm, Permission): |
|
912 | 912 | raise Exception('perm needs to be an instance of Permission class') |
|
913 | 913 | |
|
914 | 914 | try: |
|
915 | 915 | cls.query().filter(cls.users_group_id == users_group_id) \ |
|
916 | 916 | .filter(cls.permission == perm).delete() |
|
917 | 917 | Session.commit() |
|
918 | 918 | except: |
|
919 | 919 | Session.rollback() |
|
920 | 920 | |
|
921 | 921 | |
|
922 | 922 | class UserRepoGroupToPerm(Base, BaseModel): |
|
923 | 923 | __tablename__ = 'group_to_perm' |
|
924 | 924 | __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True}) |
|
925 | 925 | |
|
926 | 926 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
927 | 927 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
928 | 928 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
929 | 929 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
930 | 930 | |
|
931 | 931 | user = relationship('User') |
|
932 | 932 | permission = relationship('Permission') |
|
933 | 933 | group = relationship('RepoGroup') |
|
934 | 934 | |
|
935 | 935 | class Statistics(Base, BaseModel): |
|
936 | 936 | __tablename__ = 'statistics' |
|
937 | 937 | __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True}) |
|
938 | 938 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
939 | 939 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
940 | 940 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
|
941 | 941 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data |
|
942 | 942 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data |
|
943 | 943 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data |
|
944 | 944 | |
|
945 | 945 | repository = relationship('Repository', single_parent=True) |
|
946 | 946 | |
|
947 | 947 | class UserFollowing(Base, BaseModel): |
|
948 | 948 | __tablename__ = 'user_followings' |
|
949 | 949 | __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'), |
|
950 | 950 | UniqueConstraint('user_id', 'follows_user_id') |
|
951 | 951 | , {'extend_existing':True}) |
|
952 | 952 | |
|
953 | 953 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
954 | 954 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
955 | 955 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) |
|
956 | 956 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
957 | 957 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
958 | 958 | |
|
959 | 959 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') |
|
960 | 960 | |
|
961 | 961 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') |
|
962 | 962 | follows_repository = relationship('Repository', order_by='Repository.repo_name') |
|
963 | 963 | |
|
964 | 964 | |
|
965 | 965 | @classmethod |
|
966 | 966 | def get_repo_followers(cls, repo_id): |
|
967 | 967 | return cls.query().filter(cls.follows_repo_id == repo_id) |
|
968 | 968 | |
|
969 | 969 | class CacheInvalidation(Base, BaseModel): |
|
970 | 970 | __tablename__ = 'cache_invalidation' |
|
971 | 971 | __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True}) |
|
972 | 972 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
973 | 973 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) |
|
974 | 974 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) |
|
975 | 975 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
976 | 976 | |
|
977 | 977 | |
|
978 | 978 | def __init__(self, cache_key, cache_args=''): |
|
979 | 979 | self.cache_key = cache_key |
|
980 | 980 | self.cache_args = cache_args |
|
981 | 981 | self.cache_active = False |
|
982 | 982 | |
|
983 | 983 | def __repr__(self): |
|
984 | 984 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
985 | 985 | self.cache_id, self.cache_key) |
|
986 | 986 | |
|
987 | 987 | @classmethod |
|
988 | 988 | def invalidate(cls, key): |
|
989 | 989 | """ |
|
990 | 990 | Returns Invalidation object if this given key should be invalidated |
|
991 | 991 | None otherwise. `cache_active = False` means that this cache |
|
992 | 992 | state is not valid and needs to be invalidated |
|
993 | 993 | |
|
994 | 994 | :param key: |
|
995 | 995 | """ |
|
996 | 996 | return cls.query()\ |
|
997 | 997 | .filter(CacheInvalidation.cache_key == key)\ |
|
998 | 998 | .filter(CacheInvalidation.cache_active == False)\ |
|
999 | 999 | .scalar() |
|
1000 | 1000 | |
|
1001 | 1001 | @classmethod |
|
1002 | 1002 | def set_invalidate(cls, key): |
|
1003 | 1003 | """ |
|
1004 | 1004 | Mark this Cache key for invalidation |
|
1005 | 1005 | |
|
1006 | 1006 | :param key: |
|
1007 | 1007 | """ |
|
1008 | 1008 | |
|
1009 |
log.debug('marking %s for invalidation' |
|
|
1009 | log.debug('marking %s for invalidation', key) | |
|
1010 | 1010 | inv_obj = Session.query(cls)\ |
|
1011 | 1011 | .filter(cls.cache_key == key).scalar() |
|
1012 | 1012 | if inv_obj: |
|
1013 | 1013 | inv_obj.cache_active = False |
|
1014 | 1014 | else: |
|
1015 | 1015 | log.debug('cache key not found in invalidation db -> creating one') |
|
1016 | 1016 | inv_obj = CacheInvalidation(key) |
|
1017 | 1017 | |
|
1018 | 1018 | try: |
|
1019 | 1019 | Session.add(inv_obj) |
|
1020 | 1020 | Session.commit() |
|
1021 | 1021 | except Exception: |
|
1022 | 1022 | log.error(traceback.format_exc()) |
|
1023 | 1023 | Session.rollback() |
|
1024 | 1024 | |
|
1025 | 1025 | @classmethod |
|
1026 | 1026 | def set_valid(cls, key): |
|
1027 | 1027 | """ |
|
1028 | 1028 | Mark this cache key as active and currently cached |
|
1029 | 1029 | |
|
1030 | 1030 | :param key: |
|
1031 | 1031 | """ |
|
1032 | 1032 | inv_obj = Session.query(CacheInvalidation)\ |
|
1033 | 1033 | .filter(CacheInvalidation.cache_key == key).scalar() |
|
1034 | 1034 | inv_obj.cache_active = True |
|
1035 | 1035 | Session.add(inv_obj) |
|
1036 | 1036 | Session.commit() |
|
1037 | 1037 | |
|
1038 | 1038 | class DbMigrateVersion(Base, BaseModel): |
|
1039 | 1039 | __tablename__ = 'db_migrate_version' |
|
1040 | 1040 | __table_args__ = {'extend_existing':True} |
|
1041 | 1041 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1042 | 1042 | repository_path = Column('repository_path', Text) |
|
1043 | 1043 | version = Column('version', Integer) |
@@ -1,1266 +1,1264 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import logging |
|
23 | 23 | import datetime |
|
24 | 24 | import traceback |
|
25 | 25 | from collections import defaultdict |
|
26 | 26 | |
|
27 | 27 | from sqlalchemy import * |
|
28 | 28 | from sqlalchemy.ext.hybrid import hybrid_property |
|
29 | 29 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
30 | 30 | from beaker.cache import cache_region, region_invalidate |
|
31 | 31 | |
|
32 | 32 | from rhodecode.lib.vcs import get_backend |
|
33 | 33 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
34 | 34 | from rhodecode.lib.vcs.exceptions import VCSError |
|
35 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 36 | |
|
37 | 37 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, \ |
|
38 | 38 | safe_unicode |
|
39 | 39 | from rhodecode.lib.ext_json import json |
|
40 | 40 | from rhodecode.lib.caching_query import FromCache |
|
41 | 41 | |
|
42 | 42 | from rhodecode.model.meta import Base, Session |
|
43 | 43 | import hashlib |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | #============================================================================== |
|
49 | 49 | # BASE CLASSES |
|
50 | 50 | #============================================================================== |
|
51 | 51 | |
|
52 | 52 | _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest() |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | class ModelSerializer(json.JSONEncoder): |
|
56 | 56 | """ |
|
57 | 57 | Simple Serializer for JSON, |
|
58 | 58 | |
|
59 | 59 | usage:: |
|
60 | 60 | |
|
61 | 61 | to make object customized for serialization implement a __json__ |
|
62 | 62 | method that will return a dict for serialization into json |
|
63 | 63 | |
|
64 | 64 | example:: |
|
65 | 65 | |
|
66 | 66 | class Task(object): |
|
67 | 67 | |
|
68 | 68 | def __init__(self, name, value): |
|
69 | 69 | self.name = name |
|
70 | 70 | self.value = value |
|
71 | 71 | |
|
72 | 72 | def __json__(self): |
|
73 | 73 | return dict(name=self.name, |
|
74 | 74 | value=self.value) |
|
75 | 75 | |
|
76 | 76 | """ |
|
77 | 77 | |
|
78 | 78 | def default(self, obj): |
|
79 | 79 | |
|
80 | 80 | if hasattr(obj, '__json__'): |
|
81 | 81 | return obj.__json__() |
|
82 | 82 | else: |
|
83 | 83 | return json.JSONEncoder.default(self, obj) |
|
84 | 84 | |
|
85 | 85 | |
|
86 | 86 | class BaseModel(object): |
|
87 | 87 | """ |
|
88 | 88 | Base Model for all classess |
|
89 | 89 | """ |
|
90 | 90 | |
|
91 | 91 | @classmethod |
|
92 | 92 | def _get_keys(cls): |
|
93 | 93 | """return column names for this model """ |
|
94 | 94 | return class_mapper(cls).c.keys() |
|
95 | 95 | |
|
96 | 96 | def get_dict(self): |
|
97 | 97 | """ |
|
98 | 98 | return dict with keys and values corresponding |
|
99 | 99 | to this model data """ |
|
100 | 100 | |
|
101 | 101 | d = {} |
|
102 | 102 | for k in self._get_keys(): |
|
103 | 103 | d[k] = getattr(self, k) |
|
104 | 104 | |
|
105 | 105 | # also use __json__() if present to get additional fields |
|
106 | 106 | for k, val in getattr(self, '__json__', lambda: {})().iteritems(): |
|
107 | 107 | d[k] = val |
|
108 | 108 | return d |
|
109 | 109 | |
|
110 | 110 | def get_appstruct(self): |
|
111 | 111 | """return list with keys and values tupples corresponding |
|
112 | 112 | to this model data """ |
|
113 | 113 | |
|
114 | 114 | l = [] |
|
115 | 115 | for k in self._get_keys(): |
|
116 | 116 | l.append((k, getattr(self, k),)) |
|
117 | 117 | return l |
|
118 | 118 | |
|
119 | 119 | def populate_obj(self, populate_dict): |
|
120 | 120 | """populate model with data from given populate_dict""" |
|
121 | 121 | |
|
122 | 122 | for k in self._get_keys(): |
|
123 | 123 | if k in populate_dict: |
|
124 | 124 | setattr(self, k, populate_dict[k]) |
|
125 | 125 | |
|
126 | 126 | @classmethod |
|
127 | 127 | def query(cls): |
|
128 | 128 | return Session.query(cls) |
|
129 | 129 | |
|
130 | 130 | @classmethod |
|
131 | 131 | def get(cls, id_): |
|
132 | 132 | if id_: |
|
133 | 133 | return cls.query().get(id_) |
|
134 | 134 | |
|
135 | 135 | @classmethod |
|
136 | 136 | def getAll(cls): |
|
137 | 137 | return cls.query().all() |
|
138 | 138 | |
|
139 | 139 | @classmethod |
|
140 | 140 | def delete(cls, id_): |
|
141 | 141 | obj = cls.query().get(id_) |
|
142 | 142 | Session.delete(obj) |
|
143 | 143 | |
|
144 | 144 | def __repr__(self): |
|
145 | 145 | if hasattr(self, '__unicode__'): |
|
146 | 146 | # python repr needs to return str |
|
147 | 147 | return safe_str(self.__unicode__()) |
|
148 | 148 | return '<DB:%s>' % (self.__class__.__name__) |
|
149 | 149 | |
|
150 | 150 | |
|
151 | 151 | class RhodeCodeSetting(Base, BaseModel): |
|
152 | 152 | __tablename__ = 'rhodecode_settings' |
|
153 | 153 | __table_args__ = ( |
|
154 | 154 | UniqueConstraint('app_settings_name'), |
|
155 | 155 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
156 | 156 | 'mysql_charset': 'utf8'} |
|
157 | 157 | ) |
|
158 | 158 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
159 | 159 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) |
|
160 | 160 | _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None) |
|
161 | 161 | |
|
162 | 162 | def __init__(self, k='', v=''): |
|
163 | 163 | self.app_settings_name = k |
|
164 | 164 | self.app_settings_value = v |
|
165 | 165 | |
|
166 | 166 | @validates('_app_settings_value') |
|
167 | 167 | def validate_settings_value(self, key, val): |
|
168 | 168 | assert type(val) == unicode |
|
169 | 169 | return val |
|
170 | 170 | |
|
171 | 171 | @hybrid_property |
|
172 | 172 | def app_settings_value(self): |
|
173 | 173 | v = self._app_settings_value |
|
174 | 174 | if self.app_settings_name == 'ldap_active': |
|
175 | 175 | v = str2bool(v) |
|
176 | 176 | return v |
|
177 | 177 | |
|
178 | 178 | @app_settings_value.setter |
|
179 | 179 | def app_settings_value(self, val): |
|
180 | 180 | """ |
|
181 | 181 | Setter that will always make sure we use unicode in app_settings_value |
|
182 | 182 | |
|
183 | 183 | :param val: |
|
184 | 184 | """ |
|
185 | 185 | self._app_settings_value = safe_unicode(val) |
|
186 | 186 | |
|
187 | 187 | def __unicode__(self): |
|
188 | 188 | return u"<%s('%s:%s')>" % ( |
|
189 | 189 | self.__class__.__name__, |
|
190 | 190 | self.app_settings_name, self.app_settings_value |
|
191 | 191 | ) |
|
192 | 192 | |
|
193 | 193 | @classmethod |
|
194 | 194 | def get_by_name(cls, ldap_key): |
|
195 | 195 | return cls.query()\ |
|
196 | 196 | .filter(cls.app_settings_name == ldap_key).scalar() |
|
197 | 197 | |
|
198 | 198 | @classmethod |
|
199 | 199 | def get_app_settings(cls, cache=False): |
|
200 | 200 | |
|
201 | 201 | ret = cls.query() |
|
202 | 202 | |
|
203 | 203 | if cache: |
|
204 | 204 | ret = ret.options(FromCache("sql_cache_short", "get_hg_settings")) |
|
205 | 205 | |
|
206 | 206 | if not ret: |
|
207 | 207 | raise Exception('Could not get application settings !') |
|
208 | 208 | settings = {} |
|
209 | 209 | for each in ret: |
|
210 | 210 | settings['rhodecode_' + each.app_settings_name] = \ |
|
211 | 211 | each.app_settings_value |
|
212 | 212 | |
|
213 | 213 | return settings |
|
214 | 214 | |
|
215 | 215 | @classmethod |
|
216 | 216 | def get_ldap_settings(cls, cache=False): |
|
217 | 217 | ret = cls.query()\ |
|
218 | 218 | .filter(cls.app_settings_name.startswith('ldap_')).all() |
|
219 | 219 | fd = {} |
|
220 | 220 | for row in ret: |
|
221 | 221 | fd.update({row.app_settings_name:row.app_settings_value}) |
|
222 | 222 | |
|
223 | 223 | return fd |
|
224 | 224 | |
|
225 | 225 | |
|
226 | 226 | class RhodeCodeUi(Base, BaseModel): |
|
227 | 227 | __tablename__ = 'rhodecode_ui' |
|
228 | 228 | __table_args__ = ( |
|
229 | 229 | UniqueConstraint('ui_key'), |
|
230 | 230 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
231 | 231 | 'mysql_charset': 'utf8'} |
|
232 | 232 | ) |
|
233 | 233 | |
|
234 | 234 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
235 | 235 | HOOK_PUSH = 'pretxnchangegroup.push_logger' |
|
236 | 236 | HOOK_PULL = 'preoutgoing.pull_logger' |
|
237 | 237 | |
|
238 | 238 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
239 | 239 | ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None) |
|
240 | 240 | ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None) |
|
241 | 241 | ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None) |
|
242 | 242 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
243 | 243 | |
|
244 | 244 | @classmethod |
|
245 | 245 | def get_by_key(cls, key): |
|
246 | 246 | return cls.query().filter(cls.ui_key == key) |
|
247 | 247 | |
|
248 | 248 | @classmethod |
|
249 | 249 | def get_builtin_hooks(cls): |
|
250 | 250 | q = cls.query() |
|
251 | 251 | q = q.filter(cls.ui_key.in_([cls.HOOK_REPO_SIZE, |
|
252 | 252 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
253 | 253 | return q.all() |
|
254 | 254 | |
|
255 | 255 | @classmethod |
|
256 | 256 | def get_custom_hooks(cls): |
|
257 | 257 | q = cls.query() |
|
258 | 258 | q = q.filter(~cls.ui_key.in_([cls.HOOK_REPO_SIZE, |
|
259 | 259 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
260 | 260 | q = q.filter(cls.ui_section == 'hooks') |
|
261 | 261 | return q.all() |
|
262 | 262 | |
|
263 | 263 | @classmethod |
|
264 | 264 | def create_or_update_hook(cls, key, val): |
|
265 | 265 | new_ui = cls.get_by_key(key).scalar() or cls() |
|
266 | 266 | new_ui.ui_section = 'hooks' |
|
267 | 267 | new_ui.ui_active = True |
|
268 | 268 | new_ui.ui_key = key |
|
269 | 269 | new_ui.ui_value = val |
|
270 | 270 | |
|
271 | 271 | Session.add(new_ui) |
|
272 | 272 | |
|
273 | 273 | |
|
274 | 274 | class User(Base, BaseModel): |
|
275 | 275 | __tablename__ = 'users' |
|
276 | 276 | __table_args__ = ( |
|
277 | 277 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
278 | 278 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
279 | 279 | 'mysql_charset': 'utf8'} |
|
280 | 280 | ) |
|
281 | 281 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
282 | 282 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
283 | 283 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
284 | 284 | active = Column("active", Boolean(), nullable=True, unique=None, default=None) |
|
285 | 285 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
286 | 286 | name = Column("name", String(255), nullable=True, unique=None, default=None) |
|
287 | 287 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
288 | 288 | _email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
289 | 289 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
290 | 290 | ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None) |
|
291 | 291 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
292 | 292 | |
|
293 | 293 | user_log = relationship('UserLog', cascade='all') |
|
294 | 294 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
295 | 295 | |
|
296 | 296 | repositories = relationship('Repository') |
|
297 | 297 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
298 | 298 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
299 | 299 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') |
|
300 | 300 | |
|
301 | 301 | group_member = relationship('UserGroupMember', cascade='all') |
|
302 | 302 | |
|
303 | 303 | notifications = relationship('UserNotification', cascade='all') |
|
304 | 304 | # notifications assigned to this user |
|
305 | 305 | user_created_notifications = relationship('Notification', cascade='all') |
|
306 | 306 | # comments created by this user |
|
307 | 307 | user_comments = relationship('ChangesetComment', cascade='all') |
|
308 | 308 | |
|
309 | 309 | @hybrid_property |
|
310 | 310 | def email(self): |
|
311 | 311 | return self._email |
|
312 | 312 | |
|
313 | 313 | @email.setter |
|
314 | 314 | def email(self, val): |
|
315 | 315 | self._email = val.lower() if val else None |
|
316 | 316 | |
|
317 | 317 | @property |
|
318 | 318 | def full_name(self): |
|
319 | 319 | return '%s %s' % (self.name, self.lastname) |
|
320 | 320 | |
|
321 | 321 | @property |
|
322 | 322 | def full_name_or_username(self): |
|
323 | 323 | return ('%s %s' % (self.name, self.lastname) |
|
324 | 324 | if (self.name and self.lastname) else self.username) |
|
325 | 325 | |
|
326 | 326 | @property |
|
327 | 327 | def full_contact(self): |
|
328 | 328 | return '%s %s <%s>' % (self.name, self.lastname, self.email) |
|
329 | 329 | |
|
330 | 330 | @property |
|
331 | 331 | def short_contact(self): |
|
332 | 332 | return '%s %s' % (self.name, self.lastname) |
|
333 | 333 | |
|
334 | 334 | @property |
|
335 | 335 | def is_admin(self): |
|
336 | 336 | return self.admin |
|
337 | 337 | |
|
338 | 338 | def __unicode__(self): |
|
339 | 339 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
340 | 340 | self.user_id, self.username) |
|
341 | 341 | |
|
342 | 342 | @classmethod |
|
343 | 343 | def get_by_username(cls, username, case_insensitive=False, cache=False): |
|
344 | 344 | if case_insensitive: |
|
345 | 345 | q = cls.query().filter(cls.username.ilike(username)) |
|
346 | 346 | else: |
|
347 | 347 | q = cls.query().filter(cls.username == username) |
|
348 | 348 | |
|
349 | 349 | if cache: |
|
350 | 350 | q = q.options(FromCache( |
|
351 | 351 | "sql_cache_short", |
|
352 | 352 | "get_user_%s" % _hash_key(username) |
|
353 | 353 | ) |
|
354 | 354 | ) |
|
355 | 355 | return q.scalar() |
|
356 | 356 | |
|
357 | 357 | @classmethod |
|
358 | 358 | def get_by_auth_token(cls, auth_token, cache=False): |
|
359 | 359 | q = cls.query().filter(cls.api_key == auth_token) |
|
360 | 360 | |
|
361 | 361 | if cache: |
|
362 | 362 | q = q.options(FromCache("sql_cache_short", |
|
363 | 363 | "get_auth_token_%s" % auth_token)) |
|
364 | 364 | return q.scalar() |
|
365 | 365 | |
|
366 | 366 | @classmethod |
|
367 | 367 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
368 | 368 | if case_insensitive: |
|
369 | 369 | q = cls.query().filter(cls.email.ilike(email)) |
|
370 | 370 | else: |
|
371 | 371 | q = cls.query().filter(cls.email == email) |
|
372 | 372 | |
|
373 | 373 | if cache: |
|
374 | 374 | q = q.options(FromCache("sql_cache_short", |
|
375 | 375 | "get_auth_token_%s" % email)) |
|
376 | 376 | return q.scalar() |
|
377 | 377 | |
|
378 | 378 | def update_lastlogin(self): |
|
379 | 379 | """Update user lastlogin""" |
|
380 | 380 | self.last_login = datetime.datetime.now() |
|
381 | 381 | Session.add(self) |
|
382 |
log.debug('updated user %s lastlogin' |
|
|
382 | log.debug('updated user %s lastlogin', self.username) | |
|
383 | 383 | |
|
384 | 384 | def __json__(self): |
|
385 | 385 | return dict( |
|
386 | 386 | user_id=self.user_id, |
|
387 | 387 | first_name=self.name, |
|
388 | 388 | last_name=self.lastname, |
|
389 | 389 | email=self.email, |
|
390 | 390 | full_name=self.full_name, |
|
391 | 391 | full_name_or_username=self.full_name_or_username, |
|
392 | 392 | short_contact=self.short_contact, |
|
393 | 393 | full_contact=self.full_contact |
|
394 | 394 | ) |
|
395 | 395 | |
|
396 | 396 | |
|
397 | 397 | class UserLog(Base, BaseModel): |
|
398 | 398 | __tablename__ = 'user_logs' |
|
399 | 399 | __table_args__ = ( |
|
400 | 400 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
401 | 401 | 'mysql_charset': 'utf8'}, |
|
402 | 402 | ) |
|
403 | 403 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
404 | 404 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
405 | 405 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
|
406 | 406 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) |
|
407 | 407 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) |
|
408 | 408 | action = Column("action", String(1200000), nullable=True, unique=None, default=None) |
|
409 | 409 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
410 | 410 | |
|
411 | 411 | @property |
|
412 | 412 | def action_as_day(self): |
|
413 | 413 | return datetime.date(*self.action_date.timetuple()[:3]) |
|
414 | 414 | |
|
415 | 415 | user = relationship('User') |
|
416 | 416 | repository = relationship('Repository', cascade='') |
|
417 | 417 | |
|
418 | 418 | |
|
419 | 419 | class UserGroup(Base, BaseModel): |
|
420 | 420 | __tablename__ = 'users_groups' |
|
421 | 421 | __table_args__ = ( |
|
422 | 422 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
423 | 423 | 'mysql_charset': 'utf8'}, |
|
424 | 424 | ) |
|
425 | 425 | |
|
426 | 426 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
427 | 427 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) |
|
428 | 428 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
429 | 429 | |
|
430 | 430 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
431 | 431 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') |
|
432 | 432 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
433 | 433 | |
|
434 | 434 | def __unicode__(self): |
|
435 | 435 | return u'<userGroup(%s)>' % (self.users_group_name) |
|
436 | 436 | |
|
437 | 437 | @classmethod |
|
438 | 438 | def get_by_group_name(cls, group_name, cache=False, |
|
439 | 439 | case_insensitive=False): |
|
440 | 440 | if case_insensitive: |
|
441 | 441 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) |
|
442 | 442 | else: |
|
443 | 443 | q = cls.query().filter(cls.users_group_name == group_name) |
|
444 | 444 | if cache: |
|
445 | 445 | q = q.options(FromCache( |
|
446 | 446 | "sql_cache_short", |
|
447 | 447 | "get_user_%s" % _hash_key(group_name) |
|
448 | 448 | ) |
|
449 | 449 | ) |
|
450 | 450 | return q.scalar() |
|
451 | 451 | |
|
452 | 452 | @classmethod |
|
453 | 453 | def get(cls, users_group_id, cache=False): |
|
454 | 454 | users_group = cls.query() |
|
455 | 455 | if cache: |
|
456 | 456 | users_group = users_group.options(FromCache("sql_cache_short", |
|
457 | 457 | "get_users_group_%s" % users_group_id)) |
|
458 | 458 | return users_group.get(users_group_id) |
|
459 | 459 | |
|
460 | 460 | |
|
461 | 461 | class UserGroupMember(Base, BaseModel): |
|
462 | 462 | __tablename__ = 'users_groups_members' |
|
463 | 463 | __table_args__ = ( |
|
464 | 464 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
465 | 465 | 'mysql_charset': 'utf8'}, |
|
466 | 466 | ) |
|
467 | 467 | |
|
468 | 468 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
469 | 469 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
470 | 470 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
471 | 471 | |
|
472 | 472 | user = relationship('User', lazy='joined') |
|
473 | 473 | users_group = relationship('UserGroup') |
|
474 | 474 | |
|
475 | 475 | def __init__(self, gr_id='', u_id=''): |
|
476 | 476 | self.users_group_id = gr_id |
|
477 | 477 | self.user_id = u_id |
|
478 | 478 | |
|
479 | 479 | |
|
480 | 480 | class Repository(Base, BaseModel): |
|
481 | 481 | __tablename__ = 'repositories' |
|
482 | 482 | __table_args__ = ( |
|
483 | 483 | UniqueConstraint('repo_name'), |
|
484 | 484 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
485 | 485 | 'mysql_charset': 'utf8'}, |
|
486 | 486 | ) |
|
487 | 487 | |
|
488 | 488 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
489 | 489 | repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None) |
|
490 | 490 | clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None) |
|
491 | 491 | repo_type = Column("repo_type", String(255), nullable=False, unique=False, default='hg') |
|
492 | 492 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
493 | 493 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
494 | 494 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
495 | 495 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
496 | 496 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
497 | 497 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
498 | 498 | |
|
499 | 499 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
500 | 500 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
501 | 501 | |
|
502 | 502 | user = relationship('User') |
|
503 | 503 | fork = relationship('Repository', remote_side=repo_id) |
|
504 | 504 | group = relationship('RepoGroup') |
|
505 | 505 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
506 | 506 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
507 | 507 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
508 | 508 | |
|
509 | 509 | followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all') |
|
510 | 510 | |
|
511 | 511 | logs = relationship('UserLog') |
|
512 | 512 | |
|
513 | 513 | def __unicode__(self): |
|
514 | 514 | return u"<%s('%s:%s')>" % (self.__class__.__name__,self.repo_id, |
|
515 | 515 | self.repo_name) |
|
516 | 516 | |
|
517 | 517 | @classmethod |
|
518 | 518 | def url_sep(cls): |
|
519 | 519 | return '/' |
|
520 | 520 | |
|
521 | 521 | @classmethod |
|
522 | 522 | def get_by_repo_name(cls, repo_name): |
|
523 | 523 | q = Session.query(cls).filter(cls.repo_name == repo_name) |
|
524 | 524 | q = q.options(joinedload(Repository.fork))\ |
|
525 | 525 | .options(joinedload(Repository.user))\ |
|
526 | 526 | .options(joinedload(Repository.group)) |
|
527 | 527 | return q.scalar() |
|
528 | 528 | |
|
529 | 529 | @classmethod |
|
530 | 530 | def get_repo_forks(cls, repo_id): |
|
531 | 531 | return cls.query().filter(Repository.fork_id == repo_id) |
|
532 | 532 | |
|
533 | 533 | @classmethod |
|
534 | 534 | def base_path(cls): |
|
535 | 535 | """ |
|
536 | 536 | Returns base path when all repos are stored |
|
537 | 537 | |
|
538 | 538 | :param cls: |
|
539 | 539 | """ |
|
540 | 540 | q = Session.query(RhodeCodeUi)\ |
|
541 | 541 | .filter(RhodeCodeUi.ui_key == cls.url_sep()) |
|
542 | 542 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
543 | 543 | return q.one().ui_value |
|
544 | 544 | |
|
545 | 545 | @property |
|
546 | 546 | def just_name(self): |
|
547 | 547 | return self.repo_name.split(Repository.url_sep())[-1] |
|
548 | 548 | |
|
549 | 549 | @property |
|
550 | 550 | def groups_with_parents(self): |
|
551 | 551 | groups = [] |
|
552 | 552 | if self.group is None: |
|
553 | 553 | return groups |
|
554 | 554 | |
|
555 | 555 | cur_gr = self.group |
|
556 | 556 | groups.insert(0, cur_gr) |
|
557 | 557 | while 1: |
|
558 | 558 | gr = getattr(cur_gr, 'parent_group', None) |
|
559 | 559 | cur_gr = cur_gr.parent_group |
|
560 | 560 | if gr is None: |
|
561 | 561 | break |
|
562 | 562 | groups.insert(0, gr) |
|
563 | 563 | |
|
564 | 564 | return groups |
|
565 | 565 | |
|
566 | 566 | @property |
|
567 | 567 | def groups_and_repo(self): |
|
568 | 568 | return self.groups_with_parents, self.just_name |
|
569 | 569 | |
|
570 | 570 | @LazyProperty |
|
571 | 571 | def repo_path(self): |
|
572 | 572 | """ |
|
573 | 573 | Returns base full path for that repository means where it actually |
|
574 | 574 | exists on a filesystem |
|
575 | 575 | """ |
|
576 | 576 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
577 | 577 | Repository.url_sep()) |
|
578 | 578 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
579 | 579 | return q.one().ui_value |
|
580 | 580 | |
|
581 | 581 | @property |
|
582 | 582 | def repo_full_path(self): |
|
583 | 583 | p = [self.repo_path] |
|
584 | 584 | # we need to split the name by / since this is how we store the |
|
585 | 585 | # names in the database, but that eventually needs to be converted |
|
586 | 586 | # into a valid system path |
|
587 | 587 | p += self.repo_name.split(Repository.url_sep()) |
|
588 | 588 | return os.path.join(*p) |
|
589 | 589 | |
|
590 | 590 | def get_new_name(self, repo_name): |
|
591 | 591 | """ |
|
592 | 592 | returns new full repository name based on assigned group and new new |
|
593 | 593 | |
|
594 | 594 | :param group_name: |
|
595 | 595 | """ |
|
596 | 596 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
597 | 597 | return Repository.url_sep().join(path_prefix + [repo_name]) |
|
598 | 598 | |
|
599 | 599 | @property |
|
600 | 600 | def _config(self): |
|
601 | 601 | """ |
|
602 | 602 | Returns db based config object. |
|
603 | 603 | """ |
|
604 | 604 | from rhodecode.lib.utils import make_db_config |
|
605 | 605 | return make_db_config(clear_session=False) |
|
606 | 606 | |
|
607 | 607 | @classmethod |
|
608 | 608 | def is_valid(cls, repo_name): |
|
609 | 609 | """ |
|
610 | 610 | returns True if given repo name is a valid filesystem repository |
|
611 | 611 | |
|
612 | 612 | :param cls: |
|
613 | 613 | :param repo_name: |
|
614 | 614 | """ |
|
615 | 615 | from rhodecode.lib.utils import is_valid_repo |
|
616 | 616 | |
|
617 | 617 | return is_valid_repo(repo_name, cls.base_path()) |
|
618 | 618 | |
|
619 | 619 | #========================================================================== |
|
620 | 620 | # SCM PROPERTIES |
|
621 | 621 | #========================================================================== |
|
622 | 622 | |
|
623 | 623 | def get_commit(self, rev): |
|
624 | 624 | return get_commit_safe(self.scm_instance, rev) |
|
625 | 625 | |
|
626 | 626 | @property |
|
627 | 627 | def tip(self): |
|
628 | 628 | return self.get_commit('tip') |
|
629 | 629 | |
|
630 | 630 | @property |
|
631 | 631 | def author(self): |
|
632 | 632 | return self.tip.author |
|
633 | 633 | |
|
634 | 634 | @property |
|
635 | 635 | def last_change(self): |
|
636 | 636 | return self.scm_instance.last_change |
|
637 | 637 | |
|
638 | 638 | def comments(self, revisions=None): |
|
639 | 639 | """ |
|
640 | 640 | Returns comments for this repository grouped by revisions |
|
641 | 641 | |
|
642 | 642 | :param revisions: filter query by revisions only |
|
643 | 643 | """ |
|
644 | 644 | cmts = ChangesetComment.query()\ |
|
645 | 645 | .filter(ChangesetComment.repo == self) |
|
646 | 646 | if revisions: |
|
647 | 647 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
648 | 648 | grouped = defaultdict(list) |
|
649 | 649 | for cmt in cmts.all(): |
|
650 | 650 | grouped[cmt.revision].append(cmt) |
|
651 | 651 | return grouped |
|
652 | 652 | |
|
653 | 653 | #========================================================================== |
|
654 | 654 | # SCM CACHE INSTANCE |
|
655 | 655 | #========================================================================== |
|
656 | 656 | |
|
657 | 657 | @property |
|
658 | 658 | def invalidate(self): |
|
659 | 659 | return CacheInvalidation.invalidate(self.repo_name) |
|
660 | 660 | |
|
661 | 661 | def set_invalidate(self): |
|
662 | 662 | """ |
|
663 | 663 | set a cache for invalidation for this instance |
|
664 | 664 | """ |
|
665 | 665 | CacheInvalidation.set_invalidate(self.repo_name) |
|
666 | 666 | |
|
667 | 667 | @LazyProperty |
|
668 | 668 | def scm_instance(self): |
|
669 | 669 | return self.__get_instance() |
|
670 | 670 | |
|
671 | 671 | @property |
|
672 | 672 | def scm_instance_cached(self): |
|
673 | 673 | return self.__get_instance() |
|
674 | 674 | |
|
675 | 675 | def __get_instance(self): |
|
676 | 676 | repo_full_path = self.repo_full_path |
|
677 | 677 | try: |
|
678 | 678 | alias = get_scm(repo_full_path)[0] |
|
679 |
log.debug('Creating instance of %s repository' |
|
|
679 | log.debug('Creating instance of %s repository', alias) | |
|
680 | 680 | backend = get_backend(alias) |
|
681 | 681 | except VCSError: |
|
682 | 682 | log.error(traceback.format_exc()) |
|
683 | 683 | log.error('Perhaps this repository is in db and not in ' |
|
684 | 684 | 'filesystem run rescan repositories with ' |
|
685 | 685 | '"destroy old data " option from admin panel') |
|
686 | 686 | return |
|
687 | 687 | |
|
688 | 688 | if alias == 'hg': |
|
689 | 689 | |
|
690 | 690 | repo = backend(safe_str(repo_full_path), create=False, |
|
691 | 691 | config=self._config) |
|
692 | 692 | else: |
|
693 | 693 | repo = backend(repo_full_path, create=False) |
|
694 | 694 | |
|
695 | 695 | return repo |
|
696 | 696 | |
|
697 | 697 | |
|
698 | 698 | class RepoGroup(Base, BaseModel): |
|
699 | 699 | __tablename__ = 'groups' |
|
700 | 700 | __table_args__ = ( |
|
701 | 701 | UniqueConstraint('group_name', 'group_parent_id'), |
|
702 | 702 | CheckConstraint('group_id != group_parent_id'), |
|
703 | 703 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
704 | 704 | 'mysql_charset': 'utf8'}, |
|
705 | 705 | ) |
|
706 | 706 | __mapper_args__ = {'order_by': 'group_name'} |
|
707 | 707 | |
|
708 | 708 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
709 | 709 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
710 | 710 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
711 | 711 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
712 | 712 | |
|
713 | 713 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') |
|
714 | 714 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
715 | 715 | |
|
716 | 716 | parent_group = relationship('RepoGroup', remote_side=group_id) |
|
717 | 717 | |
|
718 | 718 | def __init__(self, group_name='', parent_group=None): |
|
719 | 719 | self.group_name = group_name |
|
720 | 720 | self.parent_group = parent_group |
|
721 | 721 | |
|
722 | 722 | def __unicode__(self): |
|
723 | 723 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
724 | 724 | self.group_name) |
|
725 | 725 | |
|
726 | 726 | @classmethod |
|
727 | 727 | def url_sep(cls): |
|
728 | 728 | return '/' |
|
729 | 729 | |
|
730 | 730 | @classmethod |
|
731 | 731 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
732 | 732 | if case_insensitive: |
|
733 | 733 | gr = cls.query()\ |
|
734 | 734 | .filter(cls.group_name.ilike(group_name)) |
|
735 | 735 | else: |
|
736 | 736 | gr = cls.query()\ |
|
737 | 737 | .filter(cls.group_name == group_name) |
|
738 | 738 | if cache: |
|
739 | 739 | gr = gr.options(FromCache( |
|
740 | 740 | "sql_cache_short", |
|
741 | 741 | "get_group_%s" % _hash_key(group_name) |
|
742 | 742 | ) |
|
743 | 743 | ) |
|
744 | 744 | return gr.scalar() |
|
745 | 745 | |
|
746 | 746 | @property |
|
747 | 747 | def parents(self): |
|
748 | 748 | parents_recursion_limit = 5 |
|
749 | 749 | groups = [] |
|
750 | 750 | if self.parent_group is None: |
|
751 | 751 | return groups |
|
752 | 752 | cur_gr = self.parent_group |
|
753 | 753 | groups.insert(0, cur_gr) |
|
754 | 754 | cnt = 0 |
|
755 | 755 | while 1: |
|
756 | 756 | cnt += 1 |
|
757 | 757 | gr = getattr(cur_gr, 'parent_group', None) |
|
758 | 758 | cur_gr = cur_gr.parent_group |
|
759 | 759 | if gr is None: |
|
760 | 760 | break |
|
761 | 761 | if cnt == parents_recursion_limit: |
|
762 | 762 | # this will prevent accidental infinit loops |
|
763 |
log.error('group nested more than %s' |
|
|
764 | parents_recursion_limit) | |
|
763 | log.error('group nested more than %s', parents_recursion_limit) | |
|
765 | 764 | break |
|
766 | 765 | |
|
767 | 766 | groups.insert(0, gr) |
|
768 | 767 | return groups |
|
769 | 768 | |
|
770 | 769 | @property |
|
771 | 770 | def children(self): |
|
772 | 771 | return RepoGroup.query().filter(RepoGroup.parent_group == self) |
|
773 | 772 | |
|
774 | 773 | @property |
|
775 | 774 | def name(self): |
|
776 | 775 | return self.group_name.split(RepoGroup.url_sep())[-1] |
|
777 | 776 | |
|
778 | 777 | @property |
|
779 | 778 | def full_path(self): |
|
780 | 779 | return self.group_name |
|
781 | 780 | |
|
782 | 781 | @property |
|
783 | 782 | def full_path_splitted(self): |
|
784 | 783 | return self.group_name.split(RepoGroup.url_sep()) |
|
785 | 784 | |
|
786 | 785 | @property |
|
787 | 786 | def repositories(self): |
|
788 | 787 | return Repository.query()\ |
|
789 | 788 | .filter(Repository.group == self)\ |
|
790 | 789 | .order_by(Repository.repo_name) |
|
791 | 790 | |
|
792 | 791 | @property |
|
793 | 792 | def repositories_recursive_count(self): |
|
794 | 793 | cnt = self.repositories.count() |
|
795 | 794 | |
|
796 | 795 | def children_count(group): |
|
797 | 796 | cnt = 0 |
|
798 | 797 | for child in group.children: |
|
799 | 798 | cnt += child.repositories.count() |
|
800 | 799 | cnt += children_count(child) |
|
801 | 800 | return cnt |
|
802 | 801 | |
|
803 | 802 | return cnt + children_count(self) |
|
804 | 803 | |
|
805 | 804 | def get_new_name(self, group_name): |
|
806 | 805 | """ |
|
807 | 806 | returns new full group name based on parent and new name |
|
808 | 807 | |
|
809 | 808 | :param group_name: |
|
810 | 809 | """ |
|
811 | 810 | path_prefix = (self.parent_group.full_path_splitted if |
|
812 | 811 | self.parent_group else []) |
|
813 | 812 | return RepoGroup.url_sep().join(path_prefix + [group_name]) |
|
814 | 813 | |
|
815 | 814 | |
|
816 | 815 | class Permission(Base, BaseModel): |
|
817 | 816 | __tablename__ = 'permissions' |
|
818 | 817 | __table_args__ = ( |
|
819 | 818 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
820 | 819 | 'mysql_charset': 'utf8'}, |
|
821 | 820 | ) |
|
822 | 821 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
823 | 822 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
824 | 823 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
825 | 824 | |
|
826 | 825 | def __unicode__(self): |
|
827 | 826 | return u"<%s('%s:%s')>" % ( |
|
828 | 827 | self.__class__.__name__, self.permission_id, self.permission_name |
|
829 | 828 | ) |
|
830 | 829 | |
|
831 | 830 | @classmethod |
|
832 | 831 | def get_by_key(cls, key): |
|
833 | 832 | return cls.query().filter(cls.permission_name == key).scalar() |
|
834 | 833 | |
|
835 | 834 | @classmethod |
|
836 | 835 | def get_default_repo_perms(cls, default_user_id): |
|
837 | 836 | q = Session.query(UserRepoToPerm, Repository, cls)\ |
|
838 | 837 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
|
839 | 838 | .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\ |
|
840 | 839 | .filter(UserRepoToPerm.user_id == default_user_id) |
|
841 | 840 | |
|
842 | 841 | return q.all() |
|
843 | 842 | |
|
844 | 843 | @classmethod |
|
845 | 844 | def get_default_group_perms(cls, default_user_id): |
|
846 | 845 | q = Session.query(UserRepoGroupToPerm, RepoGroup, cls)\ |
|
847 | 846 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
848 | 847 | .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\ |
|
849 | 848 | .filter(UserRepoGroupToPerm.user_id == default_user_id) |
|
850 | 849 | |
|
851 | 850 | return q.all() |
|
852 | 851 | |
|
853 | 852 | |
|
854 | 853 | class UserRepoToPerm(Base, BaseModel): |
|
855 | 854 | __tablename__ = 'repo_to_perm' |
|
856 | 855 | __table_args__ = ( |
|
857 | 856 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), |
|
858 | 857 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
859 | 858 | 'mysql_charset': 'utf8'} |
|
860 | 859 | ) |
|
861 | 860 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
862 | 861 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
863 | 862 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
864 | 863 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
865 | 864 | |
|
866 | 865 | user = relationship('User') |
|
867 | 866 | repository = relationship('Repository') |
|
868 | 867 | permission = relationship('Permission') |
|
869 | 868 | |
|
870 | 869 | @classmethod |
|
871 | 870 | def create(cls, user, repository, permission): |
|
872 | 871 | n = cls() |
|
873 | 872 | n.user = user |
|
874 | 873 | n.repository = repository |
|
875 | 874 | n.permission = permission |
|
876 | 875 | Session.add(n) |
|
877 | 876 | return n |
|
878 | 877 | |
|
879 | 878 | def __unicode__(self): |
|
880 | 879 | return u'<user:%s => %s >' % (self.user, self.repository) |
|
881 | 880 | |
|
882 | 881 | |
|
883 | 882 | class UserToPerm(Base, BaseModel): |
|
884 | 883 | __tablename__ = 'user_to_perm' |
|
885 | 884 | __table_args__ = ( |
|
886 | 885 | UniqueConstraint('user_id', 'permission_id'), |
|
887 | 886 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
888 | 887 | 'mysql_charset': 'utf8'} |
|
889 | 888 | ) |
|
890 | 889 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
891 | 890 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
892 | 891 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
893 | 892 | |
|
894 | 893 | user = relationship('User') |
|
895 | 894 | permission = relationship('Permission', lazy='joined') |
|
896 | 895 | |
|
897 | 896 | |
|
898 | 897 | class UserGroupRepoToPerm(Base, BaseModel): |
|
899 | 898 | __tablename__ = 'users_group_repo_to_perm' |
|
900 | 899 | __table_args__ = ( |
|
901 | 900 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), |
|
902 | 901 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
903 | 902 | 'mysql_charset': 'utf8'} |
|
904 | 903 | ) |
|
905 | 904 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
906 | 905 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
907 | 906 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
908 | 907 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
909 | 908 | |
|
910 | 909 | users_group = relationship('UserGroup') |
|
911 | 910 | permission = relationship('Permission') |
|
912 | 911 | repository = relationship('Repository') |
|
913 | 912 | |
|
914 | 913 | @classmethod |
|
915 | 914 | def create(cls, users_group, repository, permission): |
|
916 | 915 | n = cls() |
|
917 | 916 | n.users_group = users_group |
|
918 | 917 | n.repository = repository |
|
919 | 918 | n.permission = permission |
|
920 | 919 | Session.add(n) |
|
921 | 920 | return n |
|
922 | 921 | |
|
923 | 922 | def __unicode__(self): |
|
924 | 923 | return u'<userGroup:%s => %s >' % (self.users_group, self.repository) |
|
925 | 924 | |
|
926 | 925 | |
|
927 | 926 | class UserGroupToPerm(Base, BaseModel): |
|
928 | 927 | __tablename__ = 'users_group_to_perm' |
|
929 | 928 | __table_args__ = ( |
|
930 | 929 | UniqueConstraint('users_group_id', 'permission_id',), |
|
931 | 930 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
932 | 931 | 'mysql_charset': 'utf8'} |
|
933 | 932 | ) |
|
934 | 933 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
935 | 934 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
936 | 935 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
937 | 936 | |
|
938 | 937 | users_group = relationship('UserGroup') |
|
939 | 938 | permission = relationship('Permission') |
|
940 | 939 | |
|
941 | 940 | |
|
942 | 941 | class UserRepoGroupToPerm(Base, BaseModel): |
|
943 | 942 | __tablename__ = 'user_repo_group_to_perm' |
|
944 | 943 | __table_args__ = ( |
|
945 | 944 | UniqueConstraint('user_id', 'group_id', 'permission_id'), |
|
946 | 945 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
947 | 946 | 'mysql_charset': 'utf8'} |
|
948 | 947 | ) |
|
949 | 948 | |
|
950 | 949 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
951 | 950 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
952 | 951 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
953 | 952 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
954 | 953 | |
|
955 | 954 | user = relationship('User') |
|
956 | 955 | group = relationship('RepoGroup') |
|
957 | 956 | permission = relationship('Permission') |
|
958 | 957 | |
|
959 | 958 | |
|
960 | 959 | class UserGroupRepoGroupToPerm(Base, BaseModel): |
|
961 | 960 | __tablename__ = 'users_group_repo_group_to_perm' |
|
962 | 961 | __table_args__ = ( |
|
963 | 962 | UniqueConstraint('users_group_id', 'group_id'), |
|
964 | 963 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
965 | 964 | 'mysql_charset': 'utf8'} |
|
966 | 965 | ) |
|
967 | 966 | |
|
968 | 967 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
969 | 968 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
970 | 969 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
971 | 970 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
972 | 971 | |
|
973 | 972 | users_group = relationship('UserGroup') |
|
974 | 973 | permission = relationship('Permission') |
|
975 | 974 | group = relationship('RepoGroup') |
|
976 | 975 | |
|
977 | 976 | |
|
978 | 977 | class Statistics(Base, BaseModel): |
|
979 | 978 | __tablename__ = 'statistics' |
|
980 | 979 | __table_args__ = ( |
|
981 | 980 | UniqueConstraint('repository_id'), |
|
982 | 981 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
983 | 982 | 'mysql_charset': 'utf8'} |
|
984 | 983 | ) |
|
985 | 984 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
986 | 985 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
987 | 986 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
|
988 | 987 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data |
|
989 | 988 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data |
|
990 | 989 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data |
|
991 | 990 | |
|
992 | 991 | repository = relationship('Repository', single_parent=True) |
|
993 | 992 | |
|
994 | 993 | |
|
995 | 994 | class UserFollowing(Base, BaseModel): |
|
996 | 995 | __tablename__ = 'user_followings' |
|
997 | 996 | __table_args__ = ( |
|
998 | 997 | UniqueConstraint('user_id', 'follows_repository_id'), |
|
999 | 998 | UniqueConstraint('user_id', 'follows_user_id'), |
|
1000 | 999 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
1001 | 1000 | 'mysql_charset': 'utf8'} |
|
1002 | 1001 | ) |
|
1003 | 1002 | |
|
1004 | 1003 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1005 | 1004 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
1006 | 1005 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) |
|
1007 | 1006 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1008 | 1007 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
1009 | 1008 | |
|
1010 | 1009 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') |
|
1011 | 1010 | |
|
1012 | 1011 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') |
|
1013 | 1012 | follows_repository = relationship('Repository', order_by='Repository.repo_name') |
|
1014 | 1013 | |
|
1015 | 1014 | @classmethod |
|
1016 | 1015 | def get_repo_followers(cls, repo_id): |
|
1017 | 1016 | return cls.query().filter(cls.follows_repo_id == repo_id) |
|
1018 | 1017 | |
|
1019 | 1018 | |
|
1020 | 1019 | class CacheInvalidation(Base, BaseModel): |
|
1021 | 1020 | __tablename__ = 'cache_invalidation' |
|
1022 | 1021 | __table_args__ = ( |
|
1023 | 1022 | UniqueConstraint('cache_key'), |
|
1024 | 1023 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
1025 | 1024 | 'mysql_charset': 'utf8'}, |
|
1026 | 1025 | ) |
|
1027 | 1026 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1028 | 1027 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) |
|
1029 | 1028 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) |
|
1030 | 1029 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
1031 | 1030 | |
|
1032 | 1031 | def __init__(self, cache_key, cache_args=''): |
|
1033 | 1032 | self.cache_key = cache_key |
|
1034 | 1033 | self.cache_args = cache_args |
|
1035 | 1034 | self.cache_active = False |
|
1036 | 1035 | |
|
1037 | 1036 | def __unicode__(self): |
|
1038 | 1037 | return u"<%s('%s:%s')>" % (self.__class__.__name__, |
|
1039 | 1038 | self.cache_id, self.cache_key) |
|
1040 | 1039 | |
|
1041 | 1040 | @classmethod |
|
1042 | 1041 | def _get_key(cls, key): |
|
1043 | 1042 | """ |
|
1044 | 1043 | Wrapper for generating a key, together with a prefix |
|
1045 | 1044 | |
|
1046 | 1045 | :param key: |
|
1047 | 1046 | """ |
|
1048 | 1047 | import rhodecode |
|
1049 | 1048 | prefix = '' |
|
1050 | 1049 | iid = rhodecode.CONFIG.get('instance_id') |
|
1051 | 1050 | if iid: |
|
1052 | 1051 | prefix = iid |
|
1053 | 1052 | return "%s%s" % (prefix, key), prefix, key.rstrip('_README') |
|
1054 | 1053 | |
|
1055 | 1054 | @classmethod |
|
1056 | 1055 | def get_by_key(cls, key): |
|
1057 | 1056 | return cls.query().filter(cls.cache_key == key).scalar() |
|
1058 | 1057 | |
|
1059 | 1058 | @classmethod |
|
1060 | 1059 | def _get_or_create_key(cls, key, prefix, org_key): |
|
1061 | 1060 | inv_obj = Session.query(cls).filter(cls.cache_key == key).scalar() |
|
1062 | 1061 | if not inv_obj: |
|
1063 | 1062 | try: |
|
1064 | 1063 | inv_obj = CacheInvalidation(key, org_key) |
|
1065 | 1064 | Session.add(inv_obj) |
|
1066 | 1065 | Session.commit() |
|
1067 | 1066 | except Exception: |
|
1068 | 1067 | log.error(traceback.format_exc()) |
|
1069 | 1068 | Session.rollback() |
|
1070 | 1069 | return inv_obj |
|
1071 | 1070 | |
|
1072 | 1071 | @classmethod |
|
1073 | 1072 | def invalidate(cls, key): |
|
1074 | 1073 | """ |
|
1075 | 1074 | Returns Invalidation object if this given key should be invalidated |
|
1076 | 1075 | None otherwise. `cache_active = False` means that this cache |
|
1077 | 1076 | state is not valid and needs to be invalidated |
|
1078 | 1077 | |
|
1079 | 1078 | :param key: |
|
1080 | 1079 | """ |
|
1081 | 1080 | |
|
1082 | 1081 | key, _prefix, _org_key = cls._get_key(key) |
|
1083 | 1082 | inv = cls._get_or_create_key(key, _prefix, _org_key) |
|
1084 | 1083 | |
|
1085 | 1084 | if inv and inv.cache_active is False: |
|
1086 | 1085 | return inv |
|
1087 | 1086 | |
|
1088 | 1087 | @classmethod |
|
1089 | 1088 | def set_invalidate(cls, key): |
|
1090 | 1089 | """ |
|
1091 | 1090 | Mark this Cache key for invalidation |
|
1092 | 1091 | |
|
1093 | 1092 | :param key: |
|
1094 | 1093 | """ |
|
1095 | 1094 | |
|
1096 | 1095 | key, _prefix, _org_key = cls._get_key(key) |
|
1097 | 1096 | inv_objs = Session.query(cls).filter(cls.cache_args == _org_key).all() |
|
1098 |
log.debug('marking %s key[s] %s for invalidation' |
|
|
1099 | _org_key)) | |
|
1097 | log.debug('marking %s key[s] %s for invalidation', len(inv_objs), _org_key) | |
|
1100 | 1098 | try: |
|
1101 | 1099 | for inv_obj in inv_objs: |
|
1102 | 1100 | if inv_obj: |
|
1103 | 1101 | inv_obj.cache_active = False |
|
1104 | 1102 | |
|
1105 | 1103 | Session.add(inv_obj) |
|
1106 | 1104 | Session.commit() |
|
1107 | 1105 | except Exception: |
|
1108 | 1106 | log.error(traceback.format_exc()) |
|
1109 | 1107 | Session.rollback() |
|
1110 | 1108 | |
|
1111 | 1109 | @classmethod |
|
1112 | 1110 | def set_valid(cls, key): |
|
1113 | 1111 | """ |
|
1114 | 1112 | Mark this cache key as active and currently cached |
|
1115 | 1113 | |
|
1116 | 1114 | :param key: |
|
1117 | 1115 | """ |
|
1118 | 1116 | inv_obj = cls.get_by_key(key) |
|
1119 | 1117 | inv_obj.cache_active = True |
|
1120 | 1118 | Session.add(inv_obj) |
|
1121 | 1119 | Session.commit() |
|
1122 | 1120 | |
|
1123 | 1121 | |
|
1124 | 1122 | class ChangesetComment(Base, BaseModel): |
|
1125 | 1123 | __tablename__ = 'changeset_comments' |
|
1126 | 1124 | __table_args__ = ( |
|
1127 | 1125 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
1128 | 1126 | 'mysql_charset': 'utf8'}, |
|
1129 | 1127 | ) |
|
1130 | 1128 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) |
|
1131 | 1129 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1132 | 1130 | revision = Column('revision', String(40), nullable=False) |
|
1133 | 1131 | line_no = Column('line_no', Unicode(10), nullable=True) |
|
1134 | 1132 | f_path = Column('f_path', Unicode(1000), nullable=True) |
|
1135 | 1133 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) |
|
1136 | 1134 | text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) |
|
1137 | 1135 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) |
|
1138 | 1136 | |
|
1139 | 1137 | author = relationship('User', lazy='joined') |
|
1140 | 1138 | repo = relationship('Repository') |
|
1141 | 1139 | |
|
1142 | 1140 | @classmethod |
|
1143 | 1141 | def get_users(cls, revision): |
|
1144 | 1142 | """ |
|
1145 | 1143 | Returns user associated with this changesetComment. ie those |
|
1146 | 1144 | who actually commented |
|
1147 | 1145 | |
|
1148 | 1146 | :param cls: |
|
1149 | 1147 | :param revision: |
|
1150 | 1148 | """ |
|
1151 | 1149 | return Session.query(User)\ |
|
1152 | 1150 | .filter(cls.revision == revision)\ |
|
1153 | 1151 | .join(ChangesetComment.author).all() |
|
1154 | 1152 | |
|
1155 | 1153 | |
|
1156 | 1154 | class Notification(Base, BaseModel): |
|
1157 | 1155 | __tablename__ = 'notifications' |
|
1158 | 1156 | __table_args__ = ( |
|
1159 | 1157 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
1160 | 1158 | 'mysql_charset': 'utf8'}, |
|
1161 | 1159 | ) |
|
1162 | 1160 | |
|
1163 | 1161 | TYPE_CHANGESET_COMMENT = u'cs_comment' |
|
1164 | 1162 | TYPE_MESSAGE = u'message' |
|
1165 | 1163 | TYPE_MENTION = u'mention' |
|
1166 | 1164 | TYPE_REGISTRATION = u'registration' |
|
1167 | 1165 | |
|
1168 | 1166 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) |
|
1169 | 1167 | subject = Column('subject', Unicode(512), nullable=True) |
|
1170 | 1168 | body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) |
|
1171 | 1169 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
1172 | 1170 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1173 | 1171 | type_ = Column('type', Unicode(256)) |
|
1174 | 1172 | |
|
1175 | 1173 | created_by_user = relationship('User') |
|
1176 | 1174 | notifications_to_users = relationship('UserNotification', lazy='joined', |
|
1177 | 1175 | cascade="all, delete, delete-orphan") |
|
1178 | 1176 | |
|
1179 | 1177 | @property |
|
1180 | 1178 | def recipients(self): |
|
1181 | 1179 | return [x.user for x in UserNotification.query()\ |
|
1182 | 1180 | .filter(UserNotification.notification == self).all()] |
|
1183 | 1181 | |
|
1184 | 1182 | @classmethod |
|
1185 | 1183 | def create(cls, created_by, subject, body, recipients, type_=None): |
|
1186 | 1184 | if type_ is None: |
|
1187 | 1185 | type_ = Notification.TYPE_MESSAGE |
|
1188 | 1186 | |
|
1189 | 1187 | notification = cls() |
|
1190 | 1188 | notification.created_by_user = created_by |
|
1191 | 1189 | notification.subject = subject |
|
1192 | 1190 | notification.body = body |
|
1193 | 1191 | notification.type_ = type_ |
|
1194 | 1192 | notification.created_on = datetime.datetime.now() |
|
1195 | 1193 | |
|
1196 | 1194 | for u in recipients: |
|
1197 | 1195 | assoc = UserNotification() |
|
1198 | 1196 | assoc.notification = notification |
|
1199 | 1197 | u.notifications.append(assoc) |
|
1200 | 1198 | Session.add(notification) |
|
1201 | 1199 | return notification |
|
1202 | 1200 | |
|
1203 | 1201 | @property |
|
1204 | 1202 | def description(self): |
|
1205 | 1203 | from rhodecode.model.notification import NotificationModel |
|
1206 | 1204 | return NotificationModel().make_description(self) |
|
1207 | 1205 | |
|
1208 | 1206 | |
|
1209 | 1207 | class UserNotification(Base, BaseModel): |
|
1210 | 1208 | __tablename__ = 'user_to_notification' |
|
1211 | 1209 | __table_args__ = ( |
|
1212 | 1210 | UniqueConstraint('user_id', 'notification_id'), |
|
1213 | 1211 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
1214 | 1212 | 'mysql_charset': 'utf8'} |
|
1215 | 1213 | ) |
|
1216 | 1214 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) |
|
1217 | 1215 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) |
|
1218 | 1216 | read = Column('read', Boolean, default=False) |
|
1219 | 1217 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) |
|
1220 | 1218 | |
|
1221 | 1219 | user = relationship('User', lazy="joined") |
|
1222 | 1220 | notification = relationship('Notification', lazy="joined", |
|
1223 | 1221 | order_by=lambda: Notification.created_on.desc(),) |
|
1224 | 1222 | |
|
1225 | 1223 | def mark_as_read(self): |
|
1226 | 1224 | self.read = True |
|
1227 | 1225 | Session.add(self) |
|
1228 | 1226 | |
|
1229 | 1227 | |
|
1230 | 1228 | class DbMigrateVersion(Base, BaseModel): |
|
1231 | 1229 | __tablename__ = 'db_migrate_version' |
|
1232 | 1230 | __table_args__ = ( |
|
1233 | 1231 | {'extend_existing': True, 'mysql_engine':'InnoDB', |
|
1234 | 1232 | 'mysql_charset': 'utf8'}, |
|
1235 | 1233 | ) |
|
1236 | 1234 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1237 | 1235 | repository_path = Column('repository_path', Text) |
|
1238 | 1236 | version = Column('version', Integer) |
|
1239 | 1237 | |
|
1240 | 1238 | ## this is migration from 1_4_0, but now it's here to overcome a problem of |
|
1241 | 1239 | ## attaching a FK to this from 1_3_0 ! |
|
1242 | 1240 | |
|
1243 | 1241 | |
|
1244 | 1242 | class PullRequest(Base, BaseModel): |
|
1245 | 1243 | __tablename__ = 'pull_requests' |
|
1246 | 1244 | __table_args__ = ( |
|
1247 | 1245 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1248 | 1246 | 'mysql_charset': 'utf8'}, |
|
1249 | 1247 | ) |
|
1250 | 1248 | |
|
1251 | 1249 | STATUS_NEW = u'new' |
|
1252 | 1250 | STATUS_OPEN = u'open' |
|
1253 | 1251 | STATUS_CLOSED = u'closed' |
|
1254 | 1252 | |
|
1255 | 1253 | pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True) |
|
1256 | 1254 | title = Column('title', Unicode(256), nullable=True) |
|
1257 | 1255 | description = Column('description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True) |
|
1258 | 1256 | status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) |
|
1259 | 1257 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1260 | 1258 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1261 | 1259 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) |
|
1262 | 1260 | _revisions = Column('revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) # 500 revisions max |
|
1263 | 1261 | org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1264 | 1262 | org_ref = Column('org_ref', Unicode(256), nullable=False) |
|
1265 | 1263 | other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1266 | 1264 | other_ref = Column('other_ref', Unicode(256), nullable=False) |
@@ -1,1087 +1,1085 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import time |
|
23 | 23 | import logging |
|
24 | 24 | import datetime |
|
25 | 25 | import traceback |
|
26 | 26 | import hashlib |
|
27 | 27 | import collections |
|
28 | 28 | |
|
29 | 29 | from sqlalchemy import * |
|
30 | 30 | from sqlalchemy.ext.hybrid import hybrid_property |
|
31 | 31 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
32 | 32 | from sqlalchemy.exc import DatabaseError |
|
33 | 33 | from beaker.cache import cache_region, region_invalidate |
|
34 | 34 | from webob.exc import HTTPNotFound |
|
35 | 35 | |
|
36 | 36 | from rhodecode.translation import _ |
|
37 | 37 | |
|
38 | 38 | from rhodecode.lib.vcs import get_backend |
|
39 | 39 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
40 | 40 | from rhodecode.lib.vcs.exceptions import VCSError |
|
41 | 41 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
42 | 42 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
43 | 43 | |
|
44 | 44 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, \ |
|
45 | 45 | safe_unicode, remove_suffix, remove_prefix, time_to_datetime |
|
46 | 46 | from rhodecode.lib.ext_json import json |
|
47 | 47 | from rhodecode.lib.caching_query import FromCache |
|
48 | 48 | |
|
49 | 49 | from rhodecode.model.meta import Base, Session |
|
50 | 50 | |
|
51 | 51 | URL_SEP = '/' |
|
52 | 52 | log = logging.getLogger(__name__) |
|
53 | 53 | |
|
54 | 54 | #============================================================================== |
|
55 | 55 | # BASE CLASSES |
|
56 | 56 | #============================================================================== |
|
57 | 57 | |
|
def _hash_key(k):
    """Return the md5 hex digest of ``k``, used to build cache keys."""
    # PEP8 (E731): a named `def` instead of a lambda assigned to a name.
    return hashlib.md5(safe_str(k)).hexdigest()
|
59 | 59 | |
|
60 | 60 | |
|
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        Return a dict with keys and values corresponding to this model's
        data, merged with any extra fields exposed via ``__json__``.
        """
        # dict comprehension-style build instead of the manual append loop
        d = dict((k, getattr(self, k)) for k in self._get_keys())

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """Return a list of ``(key, value)`` tuples corresponding to this
        model's data."""
        # list comprehension replaces the append loop; also avoids the
        # ambiguous single-letter name `l`
        return [(k, getattr(self, k)) for k in self._get_keys()]

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Return the instance with primary key ``id_``; None for falsy ids."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Return the instance with primary key ``id_`` or raise HTTPNotFound."""
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            return safe_str(self.__unicode__())
        return '<DB:%s>' % (self.__class__.__name__)
|
146 | 146 | |
|
147 | 147 | |
|
class RhodeCodeSetting(Base, BaseModel):
    """Key/value application setting with boolean coercion for known keys."""
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None)

    def __init__(self, k='', v=''):
        self.app_settings_name = k
        self.app_settings_value = v

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # isinstance is the idiomatic type check (was: type(val) == unicode,
        # which rejects unicode subclasses and is non-idiomatic)
        assert isinstance(val, unicode)
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        # these settings are stored as strings but consumed as booleans
        if self.app_settings_name in ["ldap_active",
                                      "default_repo_enable_statistics",
                                      "default_repo_enable_locking",
                                      "default_repo_private",
                                      "default_repo_enable_downloads"]:
            v = str2bool(v)
        return v

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value
        )
|
193 | 193 | |
|
194 | 194 | |
|
class RhodeCodeUi(Base, BaseModel):
    """A single ``[section] key = value`` entry of the VCS ui configuration."""
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    # well-known hook keys stored in this table
    HOOK_REPO_SIZE = 'changegroup.repo_size'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'

    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
|
214 | 214 | |
|
215 | 215 | |
|
216 | 216 | |
|
class User(Base, BaseModel):
    """Registered user account, with lookups by username, token and email."""
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    DEFAULT_USER = 'default'
    DEFAULT_PERMISSIONS = [
        'hg.register.manual_activate', 'hg.create.repository',
        'hg.fork.repository', 'repository.read', 'group.read'
    ]
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    user_emails = relationship('UserEmailMap', cascade='all')

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are normalized to lower case on write
        self._email = val.lower() if val else None

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)

    @property
    def full_name(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    @classmethod
    def get_by_username(cls, username, case_insensitive=False, cache=False):
        """Look up a user by username, optionally case-insensitive / cached."""
        if case_insensitive:
            query = cls.query().filter(cls.username.ilike(username))
        else:
            query = cls.query().filter(cls.username == username)

        if cache:
            cache_key = "get_user_%s" % _hash_key(username)
            query = query.options(FromCache("sql_cache_short", cache_key))
        return query.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """Look up a user by API auth token, optionally cached."""
        query = cls.query().filter(cls.api_key == auth_token)
        if cache:
            query = query.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
        return query.scalar()

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Look up a user by primary email; if not found, fall back to the
        alternate addresses stored in ``UserEmailMap``.
        """
        if case_insensitive:
            query = cls.query().filter(cls.email.ilike(email))
        else:
            query = cls.query().filter(cls.email == email)

        if cache:
            query = query.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email))

        user = query.scalar()
        if user is not None:
            return user

        # try fetching in alternate email map
        map_query = UserEmailMap.query()
        if case_insensitive:
            map_query = map_query.filter(UserEmailMap.email.ilike(email))
        else:
            map_query = map_query.filter(UserEmailMap.email == email)
        map_query = map_query.options(joinedload(UserEmailMap.user))
        if cache:
            map_query = map_query.options(
                FromCache("sql_cache_short", "get_email_map_key_%s" % email))
        return getattr(map_query.scalar(), 'user', None)
|
346 | 346 | |
|
347 | 347 | |
|
class UserEmailMap(Base, BaseModel):
    """Alternate email addresses attached to a user account."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fixed typo in the error message ("is present is" -> "in")
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are normalized to lower case on write
        self._email = val.lower() if val else None
|
378 | 378 | |
|
379 | 379 | |
|
class UserIpMap(Base, BaseModel):
    """Per-user allowed IP address entries."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    user = relationship('User', lazy='joined')
|
394 | 394 | |
|
395 | 395 | |
|
class UserLog(Base, BaseModel):
    """Audit-log entry of a user action, optionally tied to a repository."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", String(1200000), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository', cascade='')
|
414 | 414 | |
|
415 | 415 | |
|
class UserGroup(Base, BaseModel):
    """Named group of users, with membership and permission relationships."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')

    def __unicode__(self):
        return u'<userGroup(%s)>' % (self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Look up a user group by name, optionally case-insensitive / cached."""
        if case_insensitive:
            query = cls.query().filter(cls.users_group_name.ilike(group_name))
        else:
            query = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            cache_key = "get_user_%s" % _hash_key(group_name)
            query = query.options(FromCache("sql_cache_short", cache_key))
        return query.scalar()

    @classmethod
    def get(cls, users_group_id, cache=False):
        """Return the user group with the given id, optionally cached."""
        query = cls.query()
        if cache:
            query = query.options(FromCache(
                "sql_cache_short", "get_users_group_%s" % users_group_id))
        return query.get(users_group_id)
|
457 | 457 | |
|
458 | 458 | |
|
class UserGroupMember(Base, BaseModel):
    """Association row linking one user to one user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
|
476 | 476 | |
|
477 | 477 | |
|
class RepositoryField(Base, BaseModel):
    """Custom extra field (key/label/value/type) attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(256), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row matching ``key`` on ``repo``, or None."""
        query = cls.query().filter(cls.repository == repo)
        return query.filter(cls.field_key == key).scalar()
|
504 | 504 | |
|
505 | 505 | |
|
class Repository(Base, BaseModel):
    """A tracked repository, plus the last-changeset cache used by migrations."""
    __tablename__ = 'repositories'
    __table_args__ = (
        UniqueConstraint('repo_name'),
        Index('r_repo_name_idx', 'repo_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None)
    clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None)
    repo_type = Column("repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    landing_rev = Column("landing_revision", String(255), nullable=False, unique=False, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    _locked = Column("locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)

    user = relationship('User')
    fork = relationship('Repository', remote_side=repo_id)
    group = relationship('RepoGroup')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing',
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
                             cascade='all')
    extra_fields = relationship('RepositoryField',
                                cascade="all, delete, delete-orphan")

    logs = relationship('UserLog')
    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")

    pull_requests_org = relationship('PullRequest',
                                     primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
                                     cascade="all, delete, delete-orphan")

    pull_requests_other = relationship('PullRequest',
                                       primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
                                       cascade="all, delete, delete-orphan")

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))

    # NOTE: for this migration we are required to have it
    @hybrid_property
    def changeset_cache(self):
        """Return the cached last-changeset dict; fall back to an
        EmptyCommit stub when the cache is unset or unparsable."""
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        fallback = EmptyCommit().__json__()
        if not self._changeset_cache:
            return fallback
        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return fallback

    @changeset_cache.setter
    def changeset_cache(self, val):
        """Serialize ``val`` to JSON; serialization failures are logged,
        not raised."""
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def get_by_repo_name(cls, repo_name):
        """Fetch a repository by name, eagerly loading fork/user/group."""
        query = (Session().query(cls)
                 .filter(cls.repo_name == repo_name)
                 .options(joinedload(Repository.fork))
                 .options(joinedload(Repository.user))
                 .options(joinedload(Repository.group)))
        return query.scalar()

    # NOTE: this is required for this migration to work
    def update_commit_cache(self, cs_cache=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            message
            date
            author

        :param cs_cache:
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # Note: Using always the empty commit here in case we are
            # upgrading towards version 3.0 and above. Reason is that in this
            # case the vcsclient connection is not available and things
            # would explode here.
            cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        if cs_cache != self.changeset_cache or not self.changeset_cache:
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or _default
            log.debug('updated repo %s with new cs cache %s', self.repo_name, cs_cache)
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping repo:%s already with latest changes', self.repo_name)
|
626 | 624 | |
|
class RepoGroup(Base, BaseModel):
    """Hierarchical grouping of repositories (self-referential parent link)."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                   self.group_name)

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look up a repo group by name, optionally case-insensitive / cached."""
        if case_insensitive:
            query = cls.query().filter(cls.group_name.ilike(group_name))
        else:
            query = cls.query().filter(cls.group_name == group_name)
        if cache:
            cache_key = "get_group_%s" % _hash_key(group_name)
            query = query.options(FromCache("sql_cache_short", cache_key))
        return query.scalar()
|
674 | 672 | |
|
675 | 673 | |
|
676 | 674 | class Permission(Base, BaseModel): |
|
677 | 675 | __tablename__ = 'permissions' |
|
678 | 676 | __table_args__ = ( |
|
679 | 677 | Index('p_perm_name_idx', 'permission_name'), |
|
680 | 678 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
681 | 679 | 'mysql_charset': 'utf8'}, |
|
682 | 680 | ) |
|
683 | 681 | PERMS = [ |
|
684 | 682 | ('repository.none', _('Repository no access')), |
|
685 | 683 | ('repository.read', _('Repository read access')), |
|
686 | 684 | ('repository.write', _('Repository write access')), |
|
687 | 685 | ('repository.admin', _('Repository admin access')), |
|
688 | 686 | |
|
689 | 687 | ('group.none', _('Repository group no access')), |
|
690 | 688 | ('group.read', _('Repository group read access')), |
|
691 | 689 | ('group.write', _('Repository group write access')), |
|
692 | 690 | ('group.admin', _('Repository group admin access')), |
|
693 | 691 | |
|
694 | 692 | ('hg.admin', _('RhodeCode Administrator')), |
|
695 | 693 | ('hg.create.none', _('Repository creation disabled')), |
|
696 | 694 | ('hg.create.repository', _('Repository creation enabled')), |
|
697 | 695 | ('hg.fork.none', _('Repository forking disabled')), |
|
698 | 696 | ('hg.fork.repository', _('Repository forking enabled')), |
|
699 | 697 | ('hg.register.none', _('Register disabled')), |
|
700 | 698 | ('hg.register.manual_activate', _('Register new user with RhodeCode ' |
|
701 | 699 | 'with manual activation')), |
|
702 | 700 | |
|
703 | 701 | ('hg.register.auto_activate', _('Register new user with RhodeCode ' |
|
704 | 702 | 'with auto activation')), |
|
705 | 703 | ] |
|
706 | 704 | |
|
707 | 705 | # defines which permissions are more important higher the more important |
|
708 | 706 | PERM_WEIGHTS = { |
|
709 | 707 | 'repository.none': 0, |
|
710 | 708 | 'repository.read': 1, |
|
711 | 709 | 'repository.write': 3, |
|
712 | 710 | 'repository.admin': 4, |
|
713 | 711 | |
|
714 | 712 | 'group.none': 0, |
|
715 | 713 | 'group.read': 1, |
|
716 | 714 | 'group.write': 3, |
|
717 | 715 | 'group.admin': 4, |
|
718 | 716 | |
|
719 | 717 | 'hg.fork.none': 0, |
|
720 | 718 | 'hg.fork.repository': 1, |
|
721 | 719 | 'hg.create.none': 0, |
|
722 | 720 | 'hg.create.repository':1 |
|
723 | 721 | } |
|
724 | 722 | |
|
725 | 723 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
726 | 724 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
727 | 725 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
728 | 726 | |
|
729 | 727 | def __unicode__(self): |
|
730 | 728 | return u"<%s('%s:%s')>" % ( |
|
731 | 729 | self.__class__.__name__, self.permission_id, self.permission_name |
|
732 | 730 | ) |
|
733 | 731 | |
|
734 | 732 | @classmethod |
|
735 | 733 | def get_by_key(cls, key): |
|
736 | 734 | return cls.query().filter(cls.permission_name == key).scalar() |
|
737 | 735 | |
|
738 | 736 | |
|
739 | 737 | class UserRepoToPerm(Base, BaseModel): |
|
740 | 738 | __tablename__ = 'repo_to_perm' |
|
741 | 739 | __table_args__ = ( |
|
742 | 740 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), |
|
743 | 741 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
744 | 742 | 'mysql_charset': 'utf8'} |
|
745 | 743 | ) |
|
746 | 744 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
747 | 745 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
748 | 746 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
749 | 747 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
750 | 748 | |
|
751 | 749 | user = relationship('User') |
|
752 | 750 | repository = relationship('Repository') |
|
753 | 751 | permission = relationship('Permission') |
|
754 | 752 | |
|
755 | 753 | def __unicode__(self): |
|
756 | 754 | return u'<user:%s => %s >' % (self.user, self.repository) |
|
757 | 755 | |
|
758 | 756 | |
|
759 | 757 | class UserToPerm(Base, BaseModel): |
|
760 | 758 | __tablename__ = 'user_to_perm' |
|
761 | 759 | __table_args__ = ( |
|
762 | 760 | UniqueConstraint('user_id', 'permission_id'), |
|
763 | 761 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
764 | 762 | 'mysql_charset': 'utf8'} |
|
765 | 763 | ) |
|
766 | 764 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
767 | 765 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
768 | 766 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
769 | 767 | |
|
770 | 768 | user = relationship('User') |
|
771 | 769 | permission = relationship('Permission', lazy='joined') |
|
772 | 770 | |
|
773 | 771 | |
|
774 | 772 | class UserGroupRepoToPerm(Base, BaseModel): |
|
775 | 773 | __tablename__ = 'users_group_repo_to_perm' |
|
776 | 774 | __table_args__ = ( |
|
777 | 775 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), |
|
778 | 776 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
779 | 777 | 'mysql_charset': 'utf8'} |
|
780 | 778 | ) |
|
781 | 779 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
782 | 780 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
783 | 781 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
784 | 782 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
785 | 783 | |
|
786 | 784 | users_group = relationship('UserGroup') |
|
787 | 785 | permission = relationship('Permission') |
|
788 | 786 | repository = relationship('Repository') |
|
789 | 787 | |
|
790 | 788 | def __unicode__(self): |
|
791 | 789 | return u'<userGroup:%s => %s >' % (self.users_group, self.repository) |
|
792 | 790 | |
|
793 | 791 | |
|
794 | 792 | class UserGroupToPerm(Base, BaseModel): |
|
795 | 793 | __tablename__ = 'users_group_to_perm' |
|
796 | 794 | __table_args__ = ( |
|
797 | 795 | UniqueConstraint('users_group_id', 'permission_id',), |
|
798 | 796 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
799 | 797 | 'mysql_charset': 'utf8'} |
|
800 | 798 | ) |
|
801 | 799 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
802 | 800 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
803 | 801 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
804 | 802 | |
|
805 | 803 | users_group = relationship('UserGroup') |
|
806 | 804 | permission = relationship('Permission') |
|
807 | 805 | |
|
808 | 806 | |
|
809 | 807 | class UserRepoGroupToPerm(Base, BaseModel): |
|
810 | 808 | __tablename__ = 'user_repo_group_to_perm' |
|
811 | 809 | __table_args__ = ( |
|
812 | 810 | UniqueConstraint('user_id', 'group_id', 'permission_id'), |
|
813 | 811 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
814 | 812 | 'mysql_charset': 'utf8'} |
|
815 | 813 | ) |
|
816 | 814 | |
|
817 | 815 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
818 | 816 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
819 | 817 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
820 | 818 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
821 | 819 | |
|
822 | 820 | user = relationship('User') |
|
823 | 821 | group = relationship('RepoGroup') |
|
824 | 822 | permission = relationship('Permission') |
|
825 | 823 | |
|
826 | 824 | |
|
827 | 825 | class UserGroupRepoGroupToPerm(Base, BaseModel): |
|
828 | 826 | __tablename__ = 'users_group_repo_group_to_perm' |
|
829 | 827 | __table_args__ = ( |
|
830 | 828 | UniqueConstraint('users_group_id', 'group_id'), |
|
831 | 829 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
832 | 830 | 'mysql_charset': 'utf8'} |
|
833 | 831 | ) |
|
834 | 832 | |
|
835 | 833 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
836 | 834 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
837 | 835 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
838 | 836 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
839 | 837 | |
|
840 | 838 | users_group = relationship('UserGroup') |
|
841 | 839 | permission = relationship('Permission') |
|
842 | 840 | group = relationship('RepoGroup') |
|
843 | 841 | |
|
844 | 842 | |
|
845 | 843 | class Statistics(Base, BaseModel): |
|
846 | 844 | __tablename__ = 'statistics' |
|
847 | 845 | __table_args__ = ( |
|
848 | 846 | UniqueConstraint('repository_id'), |
|
849 | 847 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
850 | 848 | 'mysql_charset': 'utf8'} |
|
851 | 849 | ) |
|
852 | 850 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
853 | 851 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
854 | 852 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
|
855 | 853 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data |
|
856 | 854 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data |
|
857 | 855 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data |
|
858 | 856 | |
|
859 | 857 | repository = relationship('Repository', single_parent=True) |
|
860 | 858 | |
|
861 | 859 | |
|
862 | 860 | class UserFollowing(Base, BaseModel): |
|
863 | 861 | __tablename__ = 'user_followings' |
|
864 | 862 | __table_args__ = ( |
|
865 | 863 | UniqueConstraint('user_id', 'follows_repository_id'), |
|
866 | 864 | UniqueConstraint('user_id', 'follows_user_id'), |
|
867 | 865 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
868 | 866 | 'mysql_charset': 'utf8'} |
|
869 | 867 | ) |
|
870 | 868 | |
|
871 | 869 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
872 | 870 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
873 | 871 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) |
|
874 | 872 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
875 | 873 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
876 | 874 | |
|
877 | 875 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') |
|
878 | 876 | |
|
879 | 877 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') |
|
880 | 878 | follows_repository = relationship('Repository', order_by='Repository.repo_name') |
|
881 | 879 | |
|
882 | 880 | |
|
883 | 881 | class CacheInvalidation(Base, BaseModel): |
|
884 | 882 | __tablename__ = 'cache_invalidation' |
|
885 | 883 | __table_args__ = ( |
|
886 | 884 | UniqueConstraint('cache_key'), |
|
887 | 885 | Index('key_idx', 'cache_key'), |
|
888 | 886 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
889 | 887 | 'mysql_charset': 'utf8'}, |
|
890 | 888 | ) |
|
891 | 889 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
892 | 890 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) |
|
893 | 891 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) |
|
894 | 892 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
895 | 893 | |
|
896 | 894 | def __init__(self, cache_key, cache_args=''): |
|
897 | 895 | self.cache_key = cache_key |
|
898 | 896 | self.cache_args = cache_args |
|
899 | 897 | self.cache_active = False |
|
900 | 898 | |
|
901 | 899 | |
|
902 | 900 | class ChangesetComment(Base, BaseModel): |
|
903 | 901 | __tablename__ = 'changeset_comments' |
|
904 | 902 | __table_args__ = ( |
|
905 | 903 | Index('cc_revision_idx', 'revision'), |
|
906 | 904 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
907 | 905 | 'mysql_charset': 'utf8'}, |
|
908 | 906 | ) |
|
909 | 907 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) |
|
910 | 908 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
911 | 909 | revision = Column('revision', String(40), nullable=True) |
|
912 | 910 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) |
|
913 | 911 | line_no = Column('line_no', Unicode(10), nullable=True) |
|
914 | 912 | hl_lines = Column('hl_lines', Unicode(512), nullable=True) |
|
915 | 913 | f_path = Column('f_path', Unicode(1000), nullable=True) |
|
916 | 914 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) |
|
917 | 915 | text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) |
|
918 | 916 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
919 | 917 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
920 | 918 | |
|
921 | 919 | author = relationship('User', lazy='joined') |
|
922 | 920 | repo = relationship('Repository') |
|
923 | 921 | status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan") |
|
924 | 922 | pull_request = relationship('PullRequest', lazy='joined') |
|
925 | 923 | |
|
926 | 924 | @classmethod |
|
927 | 925 | def get_users(cls, revision=None, pull_request_id=None): |
|
928 | 926 | """ |
|
929 | 927 | Returns user associated with this ChangesetComment. ie those |
|
930 | 928 | who actually commented |
|
931 | 929 | |
|
932 | 930 | :param cls: |
|
933 | 931 | :param revision: |
|
934 | 932 | """ |
|
935 | 933 | q = Session().query(User)\ |
|
936 | 934 | .join(ChangesetComment.author) |
|
937 | 935 | if revision: |
|
938 | 936 | q = q.filter(cls.revision == revision) |
|
939 | 937 | elif pull_request_id: |
|
940 | 938 | q = q.filter(cls.pull_request_id == pull_request_id) |
|
941 | 939 | return q.all() |
|
942 | 940 | |
|
943 | 941 | |
|
944 | 942 | class ChangesetStatus(Base, BaseModel): |
|
945 | 943 | __tablename__ = 'changeset_statuses' |
|
946 | 944 | __table_args__ = ( |
|
947 | 945 | Index('cs_revision_idx', 'revision'), |
|
948 | 946 | Index('cs_version_idx', 'version'), |
|
949 | 947 | UniqueConstraint('repo_id', 'revision', 'version'), |
|
950 | 948 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
951 | 949 | 'mysql_charset': 'utf8'} |
|
952 | 950 | ) |
|
953 | 951 | STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed' |
|
954 | 952 | STATUS_APPROVED = 'approved' |
|
955 | 953 | STATUS_REJECTED = 'rejected' |
|
956 | 954 | STATUS_UNDER_REVIEW = 'under_review' |
|
957 | 955 | |
|
958 | 956 | STATUSES = [ |
|
959 | 957 | (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default |
|
960 | 958 | (STATUS_APPROVED, _("Approved")), |
|
961 | 959 | (STATUS_REJECTED, _("Rejected")), |
|
962 | 960 | (STATUS_UNDER_REVIEW, _("Under Review")), |
|
963 | 961 | ] |
|
964 | 962 | |
|
965 | 963 | changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True) |
|
966 | 964 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
967 | 965 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) |
|
968 | 966 | revision = Column('revision', String(40), nullable=False) |
|
969 | 967 | status = Column('status', String(128), nullable=False, default=DEFAULT) |
|
970 | 968 | changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id')) |
|
971 | 969 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) |
|
972 | 970 | version = Column('version', Integer(), nullable=False, default=0) |
|
973 | 971 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) |
|
974 | 972 | |
|
975 | 973 | author = relationship('User', lazy='joined') |
|
976 | 974 | repo = relationship('Repository') |
|
977 | 975 | comment = relationship('ChangesetComment', lazy='joined') |
|
978 | 976 | pull_request = relationship('PullRequest', lazy='joined') |
|
979 | 977 | |
|
980 | 978 | |
|
981 | 979 | |
|
982 | 980 | class PullRequest(Base, BaseModel): |
|
983 | 981 | __tablename__ = 'pull_requests' |
|
984 | 982 | __table_args__ = ( |
|
985 | 983 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
986 | 984 | 'mysql_charset': 'utf8'}, |
|
987 | 985 | ) |
|
988 | 986 | |
|
989 | 987 | STATUS_NEW = u'new' |
|
990 | 988 | STATUS_OPEN = u'open' |
|
991 | 989 | STATUS_CLOSED = u'closed' |
|
992 | 990 | |
|
993 | 991 | pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True) |
|
994 | 992 | title = Column('title', Unicode(256), nullable=True) |
|
995 | 993 | description = Column('description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True) |
|
996 | 994 | status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) |
|
997 | 995 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
998 | 996 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
999 | 997 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) |
|
1000 | 998 | _revisions = Column('revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) |
|
1001 | 999 | org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1002 | 1000 | org_ref = Column('org_ref', Unicode(256), nullable=False) |
|
1003 | 1001 | other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1004 | 1002 | other_ref = Column('other_ref', Unicode(256), nullable=False) |
|
1005 | 1003 | |
|
1006 | 1004 | author = relationship('User', lazy='joined') |
|
1007 | 1005 | reviewers = relationship('PullRequestReviewers', |
|
1008 | 1006 | cascade="all, delete, delete-orphan") |
|
1009 | 1007 | org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id') |
|
1010 | 1008 | other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id') |
|
1011 | 1009 | statuses = relationship('ChangesetStatus') |
|
1012 | 1010 | comments = relationship('ChangesetComment', |
|
1013 | 1011 | cascade="all, delete, delete-orphan") |
|
1014 | 1012 | |
|
1015 | 1013 | |
|
1016 | 1014 | class PullRequestReviewers(Base, BaseModel): |
|
1017 | 1015 | __tablename__ = 'pull_request_reviewers' |
|
1018 | 1016 | __table_args__ = ( |
|
1019 | 1017 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1020 | 1018 | 'mysql_charset': 'utf8'}, |
|
1021 | 1019 | ) |
|
1022 | 1020 | |
|
1023 | 1021 | def __init__(self, user=None, pull_request=None): |
|
1024 | 1022 | self.user = user |
|
1025 | 1023 | self.pull_request = pull_request |
|
1026 | 1024 | |
|
1027 | 1025 | pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True) |
|
1028 | 1026 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False) |
|
1029 | 1027 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
1030 | 1028 | |
|
1031 | 1029 | user = relationship('User') |
|
1032 | 1030 | pull_request = relationship('PullRequest') |
|
1033 | 1031 | |
|
1034 | 1032 | |
|
1035 | 1033 | class Notification(Base, BaseModel): |
|
1036 | 1034 | __tablename__ = 'notifications' |
|
1037 | 1035 | __table_args__ = ( |
|
1038 | 1036 | Index('notification_type_idx', 'type'), |
|
1039 | 1037 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1040 | 1038 | 'mysql_charset': 'utf8'}, |
|
1041 | 1039 | ) |
|
1042 | 1040 | |
|
1043 | 1041 | TYPE_CHANGESET_COMMENT = u'cs_comment' |
|
1044 | 1042 | TYPE_MESSAGE = u'message' |
|
1045 | 1043 | TYPE_MENTION = u'mention' |
|
1046 | 1044 | TYPE_REGISTRATION = u'registration' |
|
1047 | 1045 | TYPE_PULL_REQUEST = u'pull_request' |
|
1048 | 1046 | TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' |
|
1049 | 1047 | |
|
1050 | 1048 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) |
|
1051 | 1049 | subject = Column('subject', Unicode(512), nullable=True) |
|
1052 | 1050 | body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) |
|
1053 | 1051 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
1054 | 1052 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1055 | 1053 | type_ = Column('type', Unicode(256)) |
|
1056 | 1054 | |
|
1057 | 1055 | created_by_user = relationship('User') |
|
1058 | 1056 | notifications_to_users = relationship('UserNotification', lazy='joined', |
|
1059 | 1057 | cascade="all, delete, delete-orphan") |
|
1060 | 1058 | |
|
1061 | 1059 | |
|
1062 | 1060 | class UserNotification(Base, BaseModel): |
|
1063 | 1061 | __tablename__ = 'user_to_notification' |
|
1064 | 1062 | __table_args__ = ( |
|
1065 | 1063 | UniqueConstraint('user_id', 'notification_id'), |
|
1066 | 1064 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1067 | 1065 | 'mysql_charset': 'utf8'} |
|
1068 | 1066 | ) |
|
1069 | 1067 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) |
|
1070 | 1068 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) |
|
1071 | 1069 | read = Column('read', Boolean, default=False) |
|
1072 | 1070 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) |
|
1073 | 1071 | |
|
1074 | 1072 | user = relationship('User', lazy="joined") |
|
1075 | 1073 | notification = relationship('Notification', lazy="joined", |
|
1076 | 1074 | order_by=lambda: Notification.created_on.desc(),) |
|
1077 | 1075 | |
|
1078 | 1076 | |
|
1079 | 1077 | class DbMigrateVersion(Base, BaseModel): |
|
1080 | 1078 | __tablename__ = 'db_migrate_version' |
|
1081 | 1079 | __table_args__ = ( |
|
1082 | 1080 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1083 | 1081 | 'mysql_charset': 'utf8'}, |
|
1084 | 1082 | ) |
|
1085 | 1083 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1086 | 1084 | repository_path = Column('repository_path', Text) |
|
1087 | 1085 | version = Column('version', Integer) |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now