##// END OF EJS Templates
feature(caches): refactor how invalidationContext works, fixes many issues with the previous solution...
super-admin -
r5288:c652fe5b default
parent child Browse files
Show More
@@ -1,120 +1,119 b''
1 1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import threading
21 21
22 22 from dogpile.cache import register_backend
23 23
24 24 from . import region_meta
25 25 from .utils import (
26 26 ActiveRegionCache,
27 FreshRegionCache,
28 27 InvalidationContext,
29 28 backend_key_generator,
30 29 clear_cache_namespace,
31 30 get_default_cache_settings,
32 31 get_or_create_region,
33 32 make_region,
34 33 str2bool,
35 34 )
36 35
37 36 module_name = 'rhodecode'
38 37
39 38 register_backend(
40 39 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
41 40 "LRUMemoryBackend")
42 41
43 42 register_backend(
44 43 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
45 44 "FileNamespaceBackend")
46 45
47 46 register_backend(
48 47 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
49 48 "RedisPickleBackend")
50 49
51 50 register_backend(
52 51 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
53 52 "RedisMsgPackBackend")
54 53
55 54
56 55 log = logging.getLogger(__name__)
57 56
58 57
59 58 FILE_TREE_CACHE_VER = 'v5'
60 59 LICENSE_CACHE_VER = 'v3'
61 60 PERMISSIONS_CACHE_VER = 'v2'
62 61
63 62 CLEAR_DELETE = 'delete'
64 63 CLEAR_INVALIDATE = 'invalidate'
65 64
66 65
67 66 def async_creation_runner(cache, cache_key, creator, mutex):
68 67
69 68 def runner():
70 69 try:
71 70 value = creator()
72 71 cache.set(cache_key, value)
73 72 finally:
74 73 mutex.release()
75 74
76 75 thread = threading.Thread(target=runner)
77 76 thread.start()
78 77
79 78
80 79 def configure_dogpile_cache(settings):
81 80 cache_dir = settings.get('cache_dir')
82 81 if cache_dir:
83 82 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
84 83
85 84 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
86 85
87 86 # inspect available namespaces
88 87 avail_regions = set()
89 88 for key in rc_cache_data.keys():
90 89 namespace_name = key.split('.', 1)[0]
91 90 if namespace_name in avail_regions:
92 91 continue
93 92
94 93 avail_regions.add(namespace_name)
95 94 log.debug('dogpile: found following cache regions: %s', namespace_name)
96 95
97 96 new_region = make_region(
98 97 name=namespace_name,
99 98 function_key_generator=None,
100 99 async_creation_runner=None
101 100 )
102 101
103 102 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
104 103 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
105 104
106 105 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
107 106 if async_creator:
108 107 log.debug('configuring region %s with async creator', new_region)
109 108 new_region.async_creation_runner = async_creation_runner
110 109
111 110 if log.isEnabledFor(logging.DEBUG):
112 111 region_args = dict(backend=new_region.actual_backend,
113 112 region_invalidator=new_region.region_invalidator.__class__)
114 113 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
115 114
116 115 region_meta.dogpile_cache_regions[namespace_name] = new_region
117 116
118 117
119 118 def includeme(config):
120 119 configure_dogpile_cache(config.registry.settings)
@@ -1,403 +1,358 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import functools
20 20 import logging
21 21 import os
22 22 import threading
23 23 import time
24 24
25 25 import decorator
26 26 from dogpile.cache import CacheRegion
27 27
28 28 import rhodecode
29 29 from rhodecode.lib.hash_utils import sha1
30 30 from rhodecode.lib.str_utils import safe_bytes
31 31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
32 32
33 33 from . import region_meta
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
38 38 def isCython(func):
39 39 """
40 40 Private helper that checks if a function is a cython function.
41 41 """
42 42 return func.__class__.__name__ == 'cython_function_or_method'
43 43
44 44
45 45 class RhodeCodeCacheRegion(CacheRegion):
46 46
47 47 def __repr__(self):
48 48 return f'{self.__class__}(name={self.name})'
49 49
50 50 def conditional_cache_on_arguments(
51 51 self, namespace=None,
52 52 expiration_time=None,
53 53 should_cache_fn=None,
54 54 to_str=str,
55 55 function_key_generator=None,
56 56 condition=True):
57 57 """
58 58 Custom conditional decorator, that will not touch any dogpile internals if
59 59 condition isn't met. This works a bit differently from should_cache_fn
60 60 And it's faster in cases we don't ever want to compute cached values
61 61 """
62 62 expiration_time_is_callable = callable(expiration_time)
63 63 if not namespace:
64 64 namespace = getattr(self, '_default_namespace', None)
65 65
66 66 if function_key_generator is None:
67 67 function_key_generator = self.function_key_generator
68 68
69 69 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
70 70
71 71 if not condition:
72 72 log.debug('Calling un-cached method:%s', user_func.__name__)
73 73 start = time.time()
74 74 result = user_func(*arg, **kw)
75 75 total = time.time() - start
76 76 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
77 77 return result
78 78
79 79 key = func_key_generator(*arg, **kw)
80 80
81 81 timeout = expiration_time() if expiration_time_is_callable \
82 82 else expiration_time
83 83
84 84 log.debug('Calling cached method:`%s`', user_func.__name__)
85 85 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
86 86
87 87 def cache_decorator(user_func):
88 88 if to_str is str:
89 89 # backwards compatible
90 90 key_generator = function_key_generator(namespace, user_func)
91 91 else:
92 92 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
93 93
94 94 def refresh(*arg, **kw):
95 95 """
96 96 Like invalidate, but regenerates the value instead
97 97 """
98 98 key = key_generator(*arg, **kw)
99 99 value = user_func(*arg, **kw)
100 100 self.set(key, value)
101 101 return value
102 102
103 103 def invalidate(*arg, **kw):
104 104 key = key_generator(*arg, **kw)
105 105 self.delete(key)
106 106
107 107 def set_(value, *arg, **kw):
108 108 key = key_generator(*arg, **kw)
109 109 self.set(key, value)
110 110
111 111 def get(*arg, **kw):
112 112 key = key_generator(*arg, **kw)
113 113 return self.get(key)
114 114
115 115 user_func.set = set_
116 116 user_func.invalidate = invalidate
117 117 user_func.get = get
118 118 user_func.refresh = refresh
119 119 user_func.key_generator = key_generator
120 120 user_func.original = user_func
121 121
122 122 # Use `decorate` to preserve the signature of :param:`user_func`.
123 123 return decorator.decorate(user_func, functools.partial(
124 124 get_or_create_for_user_func, key_generator))
125 125
126 126 return cache_decorator
127 127
128 128
129 129 def make_region(*arg, **kw):
130 130 return RhodeCodeCacheRegion(*arg, **kw)
131 131
132 132
133 133 def get_default_cache_settings(settings, prefixes=None):
134 134 prefixes = prefixes or []
135 135 cache_settings = {}
136 136 for key in settings.keys():
137 137 for prefix in prefixes:
138 138 if key.startswith(prefix):
139 139 name = key.split(prefix)[1].strip()
140 140 val = settings[key]
141 141 if isinstance(val, str):
142 142 val = val.strip()
143 143 cache_settings[name] = val
144 144 return cache_settings
145 145
146 146
147 147 def compute_key_from_params(*args):
148 148 """
149 149 Helper to compute key from given params to be used in cache manager
150 150 """
151 151 return sha1(safe_bytes("_".join(map(str, args))))
152 152
153 153
154 154 def custom_key_generator(backend, namespace, fn):
155 155 func_name = fn.__name__
156 156
157 157 def generate_key(*args):
158 158 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
159 159 namespace_pref = namespace or 'default_namespace'
160 160 arg_key = compute_key_from_params(*args)
161 161 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
162 162
163 163 return final_key
164 164
165 165 return generate_key
166 166
167 167
168 168 def backend_key_generator(backend):
169 169 """
170 170 Special wrapper that also sends over the backend to the key generator
171 171 """
172 172 def wrapper(namespace, fn):
173 173 return custom_key_generator(backend, namespace, fn)
174 174 return wrapper
175 175
176 176
177 177 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
178 178 from .backends import FileNamespaceBackend
179 179 from . import async_creation_runner
180 180
181 181 region_obj = region_meta.dogpile_cache_regions.get(region_name)
182 182 if not region_obj:
183 183 reg_keys = list(region_meta.dogpile_cache_regions.keys())
184 184 raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')
185 185
186 186 region_uid_name = f'{region_name}:{region_namespace}'
187 187
188 188 # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
189 189 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
190 190 if not region_namespace:
191 191 raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')
192 192
193 193 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
194 194 if region_exist:
195 195 log.debug('Using already configured region: %s', region_namespace)
196 196 return region_exist
197 197
198 198 expiration_time = region_obj.expiration_time
199 199
200 200 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
201 201 namespace_cache_dir = cache_dir
202 202
203 203 # we default the namespace_cache_dir to our default cache dir.
204 204 # however, if this backend is configured with filename= param, we prioritize that
205 205 # so all caches within that particular region, even those namespaced end up in the same path
206 206 if region_obj.actual_backend.filename:
207 207 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
208 208
209 209 if not os.path.isdir(namespace_cache_dir):
210 210 os.makedirs(namespace_cache_dir)
211 211 new_region = make_region(
212 212 name=region_uid_name,
213 213 function_key_generator=backend_key_generator(region_obj.actual_backend)
214 214 )
215 215
216 216 namespace_filename = os.path.join(
217 217 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
218 218 # special type that allows 1db per namespace
219 219 new_region.configure(
220 220 backend='dogpile.cache.rc.file_namespace',
221 221 expiration_time=expiration_time,
222 222 arguments={"filename": namespace_filename}
223 223 )
224 224
225 225 # create and save in region caches
226 226 log.debug('configuring new region: %s', region_uid_name)
227 227 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
228 228
229 229 region_obj._default_namespace = region_namespace
230 230 if use_async_runner:
231 231 region_obj.async_creation_runner = async_creation_runner
232 232 return region_obj
233 233
234 234
235 235 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
236 236 from . import CLEAR_DELETE, CLEAR_INVALIDATE
237 237
238 238 if not isinstance(cache_region, RhodeCodeCacheRegion):
239 239 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
240 240 log.debug('clearing cache region: %s [prefix:%s] with method=%s',
241 241 cache_region, cache_namespace_uid, method)
242 242
243 243 num_affected_keys = 0
244 244
245 245 if method == CLEAR_INVALIDATE:
246 246 # NOTE: The CacheRegion.invalidate() method’s default mode of
247 247 # operation is to set a timestamp local to this CacheRegion in this Python process only.
248 248 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
249 249 cache_region.invalidate(hard=True)
250 250
251 251 if method == CLEAR_DELETE:
252 252 num_affected_keys = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)
253 253
254 254 return num_affected_keys
255 255
256 256
257 257 class ActiveRegionCache(object):
258 def __init__(self, context, cache_data):
258 def __init__(self, context, cache_data: dict):
259 259 self.context = context
260 260 self.cache_data = cache_data
261 261
262 def should_invalidate(self):
263 return False
264
265
266 class FreshRegionCache(object):
267 def __init__(self, context, cache_data):
268 self.context = context
269 self.cache_data = cache_data
270
271 def should_invalidate(self):
272 return True
262 @property
263 def state_uid(self) -> str:
264 return self.cache_data['cache_state_uid']
273 265
274 266
275 267 class InvalidationContext(object):
276 268 """
277 269 usage::
278 270
279 271 from rhodecode.lib import rc_cache
280 272
281 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
282 region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)
273 repo_namespace_key = 'some-cache-for-repo-id-100'
274 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
275
276 def cache_generator(_state_uid):
283 277
284 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
285 def heavy_compute(cache_name, param1, param2):
286 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
287
288 # invalidation namespace is shared namespace key for all process caches
289 # we use it to send a global signal
290 invalidation_namespace = 'repo_cache:1'
278 @region.conditional_cache_on_arguments(namespace='some-common-namespace-100')
279 def _dummy_func(*args):
280 # compute heavy function
281 return _state_uid, 'result'
291 282
292 inv_context_manager = rc_cache.InvalidationContext(
293 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
283 return _dummy_func
284
294 285 with inv_context_manager as invalidation_context:
295 args = ('one', 'two')
296 # re-compute and store cache if we get invalidate signal
297 if invalidation_context.should_invalidate():
298 result = heavy_compute.refresh(*args)
299 else:
300 result = heavy_compute(*args)
286 cache_state_uid = invalidation_context.state_uid
287 cache_func = cache_generator(cache_state_uid)
288 previous_state_uid, result = cache_func(*call_args)
301 289
302 compute_time = inv_context_manager.compute_time
303 log.debug('result computed in %.4fs', compute_time)
290 should_invalidate = previous_state_uid != cache_state_uid
291 if should_invalidate:
292 _, result = cache_func.refresh(*call_args)
304 293
305 294 # To send global invalidation signal, simply run
306 CacheKey.set_invalidate(invalidation_namespace)
295 CacheKey.set_invalidate(repo_namespace_key)
307 296
308 297 """
309 298
310 299 def __repr__(self):
311 return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'
300 return f'<InvalidationContext:{self.cache_key}>'
312 301
313 def __init__(self, uid, invalidation_namespace='',
314 raise_exception=False, thread_scoped=None):
315 self.uid = uid
316 self.invalidation_namespace = invalidation_namespace
302 def __init__(self, key, raise_exception=False, thread_scoped=None):
303 self.cache_key = key
304
317 305 self.raise_exception = raise_exception
318 self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
306 self.proc_id = rhodecode.ConfigGet().get_str('instance_id') or 'DEFAULT'
319 307 self.thread_id = 'global'
320 308
321 309 if thread_scoped is None:
322 310 # if we set "default" we can override this via .ini settings
323 311 thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')
324 312
325 313 # Append the thread id to the cache key if this invalidation context
326 314 # should be scoped to the current thread.
327 315 if thread_scoped is True:
328 316 self.thread_id = threading.current_thread().ident
329 317
330 self.cache_key = compute_key_from_params(uid)
331 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
332 self.proc_id, self.thread_id, self.cache_key)
333 self.proc_key = f'proc:{self.proc_id}'
318 self.proc_key = f'proc:{self.proc_id}|thread:{self.thread_id}|key:{self.cache_key}'
334 319 self.compute_time = 0
335 320
336 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
337 from rhodecode.model.db import CacheKey
321 def get_or_create_cache_obj(self):
322 from rhodecode.model.db import CacheKey, Session, IntegrityError
338 323
339 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
340 # fetch all cache keys for this namespace and convert them to a map to find if we
341 # have specific cache_key object registered. We do this because we want to have
342 # all consistent cache_state_uid for newly registered objects
343 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
344 cache_obj = cache_obj_map.get(self.cache_key)
324 cache_obj = CacheKey.get_active_cache(self.cache_key)
345 325 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
346 326
347 327 if not cache_obj:
348 new_cache_args = invalidation_namespace
349 first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
350 cache_state_uid = None
351 if first_cache_obj:
352 cache_state_uid = first_cache_obj.cache_state_uid
353 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
354 cache_state_uid=cache_state_uid)
355
328 # generate new UID for non-existing cache object
329 cache_state_uid = CacheKey.generate_new_state_uid()
330 cache_obj = CacheKey(self.cache_key, cache_args=f'repo_state:{self._start_time}',
331 cache_state_uid=cache_state_uid, cache_active=True)
332 try:
333 Session().add(cache_obj)
334 Session().commit()
335 except IntegrityError:
336 # if we catch integrity error, it means we inserted this object
337 # assumption is that's really an edge race-condition case and
338 # it's safe to skip it
339 Session().rollback()
340 except Exception:
341 log.exception('Failed to commit on cache key update')
342 Session().rollback()
343 if self.raise_exception:
344 raise
356 345 return cache_obj
357 346
358 347 def __enter__(self):
359 """
360 Test if current object is valid, and return CacheRegion function
361 that does invalidation and calculation
362 """
363 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
364 # register or get a new key based on uid
365 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
366 cache_data = self.cache_obj.get_dict()
348 log.debug('Entering cache invalidation check context: %s', self)
367 349 self._start_time = time.time()
368 if self.cache_obj.cache_active:
369 # means our cache obj is existing and marked as it's
370 # cache is not outdated, we return ActiveRegionCache
371 self.skip_cache_active_change = True
372 350
373 return ActiveRegionCache(context=self, cache_data=cache_data)
351 self.cache_obj = self.get_or_create_cache_obj()
352 cache_data = self.cache_obj.get_dict()
374 353
375 # the key is either not existing or set to False, we return
376 # the real invalidator which re-computes value. We additionally set
377 # the flag to actually update the Database objects
378 self.skip_cache_active_change = False
379 return FreshRegionCache(context=self, cache_data=cache_data)
354 return ActiveRegionCache(context=self, cache_data=cache_data)
380 355
381 356 def __exit__(self, exc_type, exc_val, exc_tb):
382 from rhodecode.model.db import IntegrityError, Session
383
384 357 # save compute time
385 358 self.compute_time = time.time() - self._start_time
386
387 if self.skip_cache_active_change:
388 return
389
390 try:
391 self.cache_obj.cache_active = True
392 Session().add(self.cache_obj)
393 Session().commit()
394 except IntegrityError:
395 # if we catch integrity error, it means we inserted this object
396 # assumption is that's really an edge race-condition case and
397 397 # it's safe to skip it
398 Session().rollback()
399 except Exception:
400 log.exception('Failed to commit on cache key update')
401 Session().rollback()
402 if self.raise_exception:
403 raise
@@ -1,5877 +1,5886 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Database Models for RhodeCode Enterprise
21 21 """
22 22
23 23 import re
24 24 import os
25 25 import time
26 26 import string
27 27 import logging
28 28 import datetime
29 29 import uuid
30 30 import warnings
31 31 import ipaddress
32 32 import functools
33 33 import traceback
34 34 import collections
35 35
36 36 from sqlalchemy import (
37 37 or_, and_, not_, func, cast, TypeDecorator, event, select,
38 38 true, false, null,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType, BigInteger)
42 42 from sqlalchemy.sql.expression import case
43 43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 44 from sqlalchemy.orm import (
45 45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51 from pyramid.threadlocal import get_current_request
52 52 from webhelpers2.text import remove_formatting
53 53
54 54 from rhodecode.lib.str_utils import safe_bytes
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
57 57 from rhodecode.lib.vcs.backends.base import (
58 58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
59 59 from rhodecode.lib.utils2 import (
60 60 str2bool, safe_str, get_commit_safe, sha1_safe,
61 61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
63 63 from rhodecode.lib.jsonalchemy import (
64 64 MutationObj, MutationList, JsonType, JsonRaw)
65 65 from rhodecode.lib.hash_utils import sha1
66 66 from rhodecode.lib import ext_json
67 67 from rhodecode.lib import enc_utils
68 68 from rhodecode.lib.ext_json import json, str_json
69 69 from rhodecode.lib.caching_query import FromCache
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
74 74 URL_SEP = '/'
75 75 log = logging.getLogger(__name__)
76 76
77 77 # =============================================================================
78 78 # BASE CLASSES
79 79 # =============================================================================
80 80
81 81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 82 # beaker.session.secret if first is not set.
83 83 # and initialized at environment.py
84 84 ENCRYPTION_KEY: bytes = b''
85 85
86 86 # used to sort permissions by types, '#' used here is not allowed to be in
87 87 # usernames, and it's very early in sorted string.printable table.
88 88 PERMISSION_TYPE_SORT = {
89 89 'admin': '####',
90 90 'write': '###',
91 91 'read': '##',
92 92 'none': '#',
93 93 }
94 94
95 95
96 96 def display_user_sort(obj):
97 97 """
98 98 Sort function used to sort permissions in .permissions() function of
99 99 Repository, RepoGroup, UserGroup. Also it put the default user in front
100 100 of all other resources
101 101 """
102 102
103 103 if obj.username == User.DEFAULT_USER:
104 104 return '#####'
105 105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 106 extra_sort_num = '1' # default
107 107
108 108 # NOTE(dan): inactive duplicates goes last
109 109 if getattr(obj, 'duplicate_perm', None):
110 110 extra_sort_num = '9'
111 111 return prefix + extra_sort_num + obj.username
112 112
113 113
114 114 def display_user_group_sort(obj):
115 115 """
116 116 Sort function used to sort permissions in .permissions() function of
117 117 Repository, RepoGroup, UserGroup. Also it put the default user in front
118 118 of all other resources
119 119 """
120 120
121 121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
122 122 return prefix + obj.users_group_name
123 123
124 124
125 125 def _hash_key(k):
126 126 return sha1_safe(k)
127 127
128 128
129 129 def in_filter_generator(qry, items, limit=500):
130 130 """
131 131 Splits IN() into multiple with OR
132 132 e.g.::
133 133 cnt = Repository.query().filter(
134 134 or_(
135 135 *in_filter_generator(Repository.repo_id, range(100000))
136 136 )).count()
137 137 """
138 138 if not items:
139 139 # empty list will cause empty query which might cause security issues
140 140 # this can lead to hidden unpleasant results
141 141 items = [-1]
142 142
143 143 parts = []
144 144 for chunk in range(0, len(items), limit):
145 145 parts.append(
146 146 qry.in_(items[chunk: chunk + limit])
147 147 )
148 148
149 149 return parts
150 150
151 151
152 152 base_table_args = {
153 153 'extend_existing': True,
154 154 'mysql_engine': 'InnoDB',
155 155 'mysql_charset': 'utf8',
156 156 'sqlite_autoincrement': True
157 157 }
158 158
159 159
160 160 class EncryptedTextValue(TypeDecorator):
161 161 """
162 162 Special column for encrypted long text data, use like::
163 163
164 164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
165 165
166 166 This column is intelligent so if value is in unencrypted form it return
167 167 unencrypted form, but on save it always encrypts
168 168 """
169 169 cache_ok = True
170 170 impl = Text
171 171
172 172 def process_bind_param(self, value, dialect):
173 173 """
174 174 Setter for storing value
175 175 """
176 176 import rhodecode
177 177 if not value:
178 178 return value
179 179
180 180 # protect against double encrypting if values is already encrypted
181 181 if value.startswith('enc$aes$') \
182 182 or value.startswith('enc$aes_hmac$') \
183 183 or value.startswith('enc2$'):
184 184 raise ValueError('value needs to be in unencrypted format, '
185 185 'ie. not starting with enc$ or enc2$')
186 186
187 187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
188 188 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
189 189 return safe_str(bytes_val)
190 190
191 191 def process_result_value(self, value, dialect):
192 192 """
193 193 Getter for retrieving value
194 194 """
195 195
196 196 import rhodecode
197 197 if not value:
198 198 return value
199 199
200 200 enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
201 201
202 202 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
203 203
204 204 return safe_str(bytes_val)
205 205
206 206
207 207 class BaseModel(object):
208 208 """
209 209 Base Model for all classes
210 210 """
211 211
212 212 @classmethod
213 213 def _get_keys(cls):
214 214 """return column names for this model """
215 215 return class_mapper(cls).c.keys()
216 216
217 217 def get_dict(self):
218 218 """
219 219 return dict with keys and values corresponding
220 220 to this model data """
221 221
222 222 d = {}
223 223 for k in self._get_keys():
224 224 d[k] = getattr(self, k)
225 225
226 226 # also use __json__() if present to get additional fields
227 227 _json_attr = getattr(self, '__json__', None)
228 228 if _json_attr:
229 229 # update with attributes from __json__
230 230 if callable(_json_attr):
231 231 _json_attr = _json_attr()
232 232 for k, val in _json_attr.items():
233 233 d[k] = val
234 234 return d
235 235
236 236 def get_appstruct(self):
237 237 """return list with keys and values tuples corresponding
238 238 to this model data """
239 239
240 240 lst = []
241 241 for k in self._get_keys():
242 242 lst.append((k, getattr(self, k),))
243 243 return lst
244 244
245 245 def populate_obj(self, populate_dict):
246 246 """populate model with data from given populate_dict"""
247 247
248 248 for k in self._get_keys():
249 249 if k in populate_dict:
250 250 setattr(self, k, populate_dict[k])
251 251
252 252 @classmethod
253 253 def query(cls):
254 254 return Session().query(cls)
255 255
256 256 @classmethod
257 257 def select(cls, custom_cls=None):
258 258 """
259 259 stmt = cls.select().where(cls.user_id==1)
260 260 # optionally
261 261 stmt = cls.select(User.user_id).where(cls.user_id==1)
262 262 result = cls.execute(stmt) | cls.scalars(stmt)
263 263 """
264 264
265 265 if custom_cls:
266 266 stmt = select(custom_cls)
267 267 else:
268 268 stmt = select(cls)
269 269 return stmt
270 270
271 271 @classmethod
272 272 def execute(cls, stmt):
273 273 return Session().execute(stmt)
274 274
275 275 @classmethod
276 276 def scalars(cls, stmt):
277 277 return Session().scalars(stmt)
278 278
279 279 @classmethod
280 280 def get(cls, id_):
281 281 if id_:
282 282 return cls.query().get(id_)
283 283
284 284 @classmethod
285 285 def get_or_404(cls, id_):
286 286 from pyramid.httpexceptions import HTTPNotFound
287 287
288 288 try:
289 289 id_ = int(id_)
290 290 except (TypeError, ValueError):
291 291 raise HTTPNotFound()
292 292
293 293 res = cls.query().get(id_)
294 294 if not res:
295 295 raise HTTPNotFound()
296 296 return res
297 297
    @classmethod
    def getAll(cls):
        """Deprecated alias for :meth:`get_all`, kept for backward compatibility."""
        # deprecated and left for backward compatibility
        return cls.get_all()
302 302
    @classmethod
    def get_all(cls):
        """Return all rows of this model as a list."""
        return cls.query().all()
306 306
    @classmethod
    def delete(cls, id_):
        """Mark the row with primary key *id_* for deletion (no commit here)."""
        obj = cls.query().get(id_)
        # NOTE(review): if id_ does not exist, obj is None and Session().delete
        # raises; presumably callers only pass existing ids — confirm.
        Session().delete(obj)
311 311
312 312 @classmethod
313 313 def identity_cache(cls, session, attr_name, value):
314 314 exist_in_session = []
315 315 for (item_cls, pkey), instance in session.identity_map.items():
316 316 if cls == item_cls and getattr(instance, attr_name) == value:
317 317 exist_in_session.append(instance)
318 318 if exist_in_session:
319 319 if len(exist_in_session) == 1:
320 320 return exist_in_session[0]
321 321 log.exception(
322 322 'multiple objects with attr %s and '
323 323 'value %s found with same name: %r',
324 324 attr_name, value, exist_in_session)
325 325
326 326 @property
327 327 def cls_name(self):
328 328 return self.__class__.__name__
329 329
330 330 def __repr__(self):
331 331 return f'<DB:{self.cls_name}>'
332 332
333 333
class RhodeCodeSetting(Base, BaseModel):
    """
    Global application settings stored as typed key/value rows.

    The raw value is persisted as a string; ``app_settings_type`` records
    how to convert it back to a python object (see SETTINGS_TYPES), with an
    optional ``.encrypted`` suffix that triggers transparent en/decryption.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters used to coerce the stored string back into a python type;
    # keyed by the first dotted segment of ``app_settings_type``
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        # type must be set before value: the value setter consults
        # app_settings_type to decide on encryption
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # raw stored value must already be a str at this point
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        """Return the stored value converted to its declared python type."""
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip an optional qualifier, e.g. 'unicode.encrypted' -> 'unicode'
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_str(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_str(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        """Declared type of this setting (one of SETTINGS_TYPES, dotted qualifiers allowed)."""
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # validate only the base type segment; qualifiers like '.encrypted' pass
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        """Return all settings whose name starts with *prefix*."""
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
418 418
419 419
class RhodeCodeUi(Base, BaseModel):
    """
    Global VCS "ui" configuration entries (hooks, svn patterns, etc.),
    modelled after mercurial-style ``[section] key = value`` config.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )
    # Sync those values with vcsserver.config.hooks

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks shipped and managed by RhodeCode itself
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
                                    self.ui_key, self.ui_value)
468 468
469 469
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository override of :class:`RhodeCodeSetting` (no encryption
    support here; values are stored/read as plain typed strings).
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository', viewonly=True)

    def __init__(self, repository_id, key='', val='', type='unicode'):
        # type must be set before value (mirrors RhodeCodeSetting.__init__)
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # raw stored value must already be a str at this point
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        """Return the stored value converted to its declared python type."""
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_str(val)

    @hybrid_property
    def app_settings_type(self):
        """Declared type of this setting (one of RhodeCodeSetting.SETTINGS_TYPES)."""
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # NOTE: unlike RhodeCodeSetting, no dotted qualifiers are accepted here
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __repr__(self):
        return "<%s('%s:%s:%s[%s]')>" % (
            self.cls_name, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
543 543
544 544
class RepoRhodeCodeUi(Base, BaseModel):
    """Per-repository override of :class:`RhodeCodeUi` config entries."""
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository', viewonly=True)

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.cls_name, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
575 575
576 576
class User(Base, BaseModel):
    """
    Registered account.  Also backs the special anonymous ``default`` user
    (see DEFAULT_USER).  Several lookups go through the "sql_cache_short"
    dogpile cache region via FromCache query options.
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog', back_populates='user')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository', back_populates='user')
    repository_groups = relationship('RepoGroup', back_populates='user')
    user_groups = relationship('UserGroup', back_populates='user')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')

    group_member = relationship('UserGroupMember', cascade='all', back_populates='user')

    notifications = relationship('UserNotification', cascade='all', back_populates='user')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
    user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
    user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
    user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')

    # gists
    user_gists = relationship('Gist', cascade='all', back_populates='owner')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')

    # external identities
    external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')

    def __repr__(self):
        return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"

    @hybrid_property
    def email(self):
        """Primary email; stored lowercased (see setter)."""
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lowercase so the unique constraint is case-insensitive
        self._email = val.lower() if val else None

    @hybrid_property
    def first_name(self):
        """HTML-escaped first name (safe for templates)."""
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        """HTML-escaped last name (safe for templates)."""
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname

    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        """All PullRequestReviewers rows for this user, with PRs eagerly loaded."""
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        """Primary email first, followed by all extra mapped emails."""
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    def emails_cached(self):
        """Like :attr:`emails` but served from the short SQL cache region."""
        emails = []
        # the default (anonymous) user never has extra emails; skip the query
        if self.user_id != self.get_default_user_id():
            emails = UserEmailMap.query()\
                .filter(UserEmailMap.user == self) \
                .order_by(UserEmailMap.email_id.asc())

            emails = emails.options(
                FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
            )

        return [self.email] + [x.email for x in emails]

    @property
    def auth_tokens(self):
        """Plain list of this user's token strings."""
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        """All UserApiKeys rows for this user, oldest first."""
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()

    @LazyProperty
    def feed_token(self):
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        """Return the first FEED-role token, or a sentinel string when none exists."""
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'

    @LazyProperty
    def artifact_token(self):
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        """Return the first unexpired ARTIFACT_DOWNLOAD token, or a sentinel string."""
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'

    def get_or_create_artifact_token(self):
        """Return an unexpired artifact token, creating (and committing) one if missing."""
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        else:
            from rhodecode.model.auth_token import AuthTokenModel
            artifact_token = AuthTokenModel().create(
                self, 'auto-generated-artifact-token',
                lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
            # NOTE(review): rest of the file uses Session().commit(); this
            # relies on scoped_session method proxying — confirm intentional
            Session.commit()
            return artifact_token.api_key

    @classmethod
    def get(cls, user_id, cache=False):
        """Fetch user by id, optionally via the short SQL cache region."""
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", f"get_users_{user_id}"))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """Unexpired tokens of *user*; filtered to *role* (or ROLE_ALL) when given."""
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check *auth_token* against this user's unexpired tokens.

        :param auth_token: plain token string presented by the caller.
        :param roles: acceptable token roles; ROLE_ALL is always accepted.
        :param scope_repo_id: repo id of the calling scope, compared against
            repo-scoped tokens.
        :return: True on a valid (and scope-matching) token, else False.
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False

    @property
    def ip_addresses(self):
        """Whitelisted IP addresses bound to this user."""
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return f'{self.username} ({self.first_name} {self.last_name})'

    @property
    def username_or_name_or_email(self):
        """Best available display identifier for this user."""
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return f'{self.first_name} {self.last_name}'

    @property
    def full_name_or_username(self):
        return (f'{self.first_name} {self.last_name}'
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        return f'{self.first_name} {self.last_name} <{self.email}>'

    @property
    def short_contact(self):
        return f'{self.first_name} {self.last_name}'

    @property
    def is_admin(self):
        return self.admin

    @property
    def language(self):
        return self.user_data.get('language')

    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        """Extra user data deserialized from the JSON blob; {} on empty/bad data."""
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data) or {}
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = safe_bytes(json.dumps(val))
        except Exception:
            # serialization failure is logged but never propagated
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False):
        """Fetch user by username, optionally case-insensitive and/or cached."""

        if case_insensitive:
            q = cls.select().where(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.select().where(cls.username == username)

        if cache:
            hash_key = _hash_key(username)
            q = q.options(
                FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))

        return cls.execute(q).scalar_one_or_none()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """Fetch the user owning an unexpired *auth_token*, or None."""

        q = cls.select(User)\
            .join(UserApiKeys)\
            .where(UserApiKeys.api_key == auth_token)\
            .where(or_(UserApiKeys.expires == -1,
                       UserApiKeys.expires >= time.time()))

        if cache:
            q = q.options(
                FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))

        matched_user = cls.execute(q).scalar_one_or_none()

        return matched_user

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """Fetch user by primary email, falling back to the extra-email map."""

        if case_insensitive:
            q = cls.select().where(func.lower(cls.email) == func.lower(email))
        else:
            q = cls.select().where(cls.email == email)

        if cache:
            email_key = _hash_key(email)
            q = q.options(
                FromCache("sql_cache_short", f"get_email_key_{email_key}"))

        ret = cls.execute(q).scalar_one_or_none()

        if ret is None:
            q = cls.select(UserEmailMap)
            # try fetching in alternate email map
            if case_insensitive:
                q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.where(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))

            result = cls.execute(q).scalar_one_or_none()
            ret = getattr(result, 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

    def update_userdata(self, **kwargs):
        """Merge *kwargs* into the user's JSON user_data blob (no commit)."""
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_password(self, new_password):
        """Store *new_password* hashed with the configured crypt backend (no commit)."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        """Return the lowest-id super-admin; raises when none exists."""
        stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
        user = cls.scalars(stmt).first()

        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls, only_active=False):
        """
        Returns all admin accounts sorted by username
        """
        qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
        if only_active:
            qry = qry.filter(User.active == true())
        return qry.all()

    @classmethod
    def get_all_user_ids(cls, only_active=True):
        """
        Returns all users IDs
        """
        qry = Session().query(User.user_id)

        if only_active:
            qry = qry.filter(User.active == true())
        return [x.user_id for x in qry]

    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """Return the anonymous ``default`` user; raises when missing."""
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)

        return user

    @classmethod
    def get_default_user_id(cls):
        """Return the default user's id from the global app config."""
        import rhodecode
        return rhodecode.CONFIG['default_user_id']

    def _get_default_perms(self, user, suffix=''):
        # helper delegating to PermissionModel for default permission lookup
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        """Return this user's default permissions (optionally suffixed keys)."""
        return self._get_default_perms(self, suffix)

    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
        by a placeholder value to prevent exposing this data by accident. In case
        this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
        the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'description': user.description,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data

    def __json__(self):
        # extra serialization fields merged on top of get_api_data()
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data
1155 1155
1156 1156
class UserApiKeys(Base, BaseModel):
    """Authentication tokens (auth tokens / API keys) issued to users.

    A token carries a role restricting what it may be used for, and can
    optionally be scoped to a single repository or, recursively, to a
    repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # expiry timestamp (unix time); -1 means the token never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined', back_populates='user_auth_tokens')

    def __repr__(self):
        return f"<{self.cls_name}('{self.role}')>"

    def __json__(self):
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API payload for this token; obfuscates the token unless
        ``include_secrets`` is True."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escape user-provided text before rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # -1 is the sentinel for "never expires"
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # human-readable name; falls back to the raw role string
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @classmethod
    def _get_role_description(cls, role):
        # long description shown in the UI; falls back to the raw role string
        return {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # most specific scope wins: repo, then repo group, then global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters plus a mask; returns None when api_key is unset
        if self.api_key:
            return self.api_key[:4] + "****"
1276 1276
1277 1277
class UserEmailMap(Base, BaseModel):
    """Additional (alternative) email addresses mapped to a user.

    The primary email lives on the ``User`` row itself; entries here are
    unique and must not duplicate any user's primary email.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        Index('uem_user_id_idx', 'user_id'),
        UniqueConstraint('email'),
        base_table_args
    )

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined', back_populates='user_emails')

    @validates('_email')
    def validate_email(self, key, email):
        """Reject emails already used as some user's primary email."""
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fixed message grammar ("is present is" -> "is present in")
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lowercase; empty values are stored as NULL
        self._email = val.lower() if val else None
1307 1307
1308 1308
class UserIpMap(Base, BaseModel):
    """Per-user IP allow-list entries.

    ``ip_addr`` may hold a single address or a CIDR network (see
    ``_get_ip_range``).
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', back_populates='user_ip_map')

    @hybrid_property
    def description_safe(self):
        # HTML-escape user-provided text before rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [first, last] address of the network ``ip_addr`` describes."""
        # strict=False accepts networks with host bits set
        net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __repr__(self):
        return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1341 1341
1342 1342
class UserSshKeys(Base, BaseModel):
    """Public SSH keys registered by users, identified by fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # last time the key was used for access; None until first use
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='user_ssh_keys')

    def __json__(self):
        # note: the raw key data is deliberately not exposed here
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        # API payload is the same as the plain JSON representation
        data = self.__json__()
        return data
1376 1376
1377 1377
class UserLog(Base, BaseModel):
    """Audit-log (journal) entry for user actions.

    User and repository references use ``ondelete='SET NULL'`` so log rows
    survive deletion of the referenced user or repository; the plain
    ``username``/``repository_name`` columns keep the historical names.
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # schema versions of the stored entry (see the `version` column)
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    # cascade='' so deleting a log row never cascades to users/repos
    user = relationship('User', cascade='', back_populates='user_log')
    repository = relationship('Repository', cascade='', back_populates='logs')

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        return self.user_log_id

    @property
    def action_as_day(self):
        # calendar date (no time component) the action happened on
        return datetime.date(*self.action_date.timetuple()[:3])
1424 1424
1425 1425
class UserGroup(Base, BaseModel):
    """A named group of users, used as a permission target and member set.

    ``_group_data`` stores an opaque JSON blob; ``sync`` exposes its
    ``extern_type`` key for externally-synchronized groups.
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    # owner of the group
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')

    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')

    @classmethod
    def _load_group_data(cls, column):
        """Parse the raw JSON blob; any falsy/unparsable value yields {}."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escape user-provided text before rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL expression side operates on the raw column
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # serialize to JSON; on failure log and leave the old value in place
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # returns None for empty data or when 'extern_type' is missing
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a group by name, optionally case-insensitive and SQL-cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            q = q.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a group by primary key; returns None for falsy ids."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups

        Returns rows for the owner, super-admins, directly-permissioned
        users and (optionally) members of permissioned user groups.
        NOTE(review): the admin branch indexes owner_row[0], so it assumes
        with_owner=True whenever with_admins=True -- verify with callers.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups that hold a permission on this group, sorted for display."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        # this group instance itself is the source of the permission rows
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        Build the user-group data dictionary exposed over the API.

        :param with_group_members: include per-member user payloads.
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1631 1631
1632 1632
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group (membership)."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='group_member')
    users_group = relationship('UserGroup', back_populates='members')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings although the columns are
        # integer FKs -- callers are expected to always pass both ids
        self.users_group_id = gr_id
        self.user_id = u_id
1649 1649
1650 1650
class RepositoryField(Base, BaseModel):
    """Custom extra field attached to a repository (one value per key)."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository', back_populates='extra_fields')

    @property
    def field_key_prefixed(self):
        # use the shared PREFIX constant (was a hard-coded 'ex_') so it
        # always stays in sync with un_prefix_key()
        return f'{self.PREFIX}{self.field_key}'

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from *key* if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Fetch the field row for *key* on *repo*, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1687 1687
1688 1688
class Repository(Base, BaseModel):
    """ORM model for a single repository."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used to render default clone/push addresses
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repo_state values
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # lock-reason markers stored with a lock (see the `locked` property)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # backing column for the `repo_name` hybrid property
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # backing column for the `landing_rev` hybrid property ('<type>:<rev>')
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # backing column for the `locked` hybrid property ('<user_id>:<time>:<reason>')
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True) # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined', back_populates='repositories')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
    extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')

    logs = relationship('UserLog', back_populates='repository')

    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')

    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        #back_populates="pr_source"
    )
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        #back_populates="pr_target"
    )

    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')

    scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)

    review_rules = relationship('RepoReviewRule')
    user_branch_perms = relationship('UserToRepoBranchPermission')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1803 1803
1804 1804 def __repr__(self):
1805 1805 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1806 1806
1807 1807 @hybrid_property
1808 1808 def description_safe(self):
1809 1809 from rhodecode.lib import helpers as h
1810 1810 return h.escape(self.description)
1811 1811
1812 1812 @hybrid_property
1813 1813 def landing_rev(self):
1814 1814 # always should return [rev_type, rev], e.g ['branch', 'master']
1815 1815 if self._landing_revision:
1816 1816 _rev_info = self._landing_revision.split(':')
1817 1817 if len(_rev_info) < 2:
1818 1818 _rev_info.insert(0, 'rev')
1819 1819 return [_rev_info[0], _rev_info[1]]
1820 1820 return [None, None]
1821 1821
1822 1822 @property
1823 1823 def landing_ref_type(self):
1824 1824 return self.landing_rev[0]
1825 1825
1826 1826 @property
1827 1827 def landing_ref_name(self):
1828 1828 return self.landing_rev[1]
1829 1829
1830 1830 @landing_rev.setter
1831 1831 def landing_rev(self, val):
1832 1832 if ':' not in val:
1833 1833 raise ValueError('value must be delimited with `:` and consist '
1834 1834 'of <rev_type>:<rev>, got %s instead' % val)
1835 1835 self._landing_revision = val
1836 1836
1837 1837 @hybrid_property
1838 1838 def locked(self):
1839 1839 if self._locked:
1840 1840 user_id, timelocked, reason = self._locked.split(':')
1841 1841 lock_values = int(user_id), timelocked, reason
1842 1842 else:
1843 1843 lock_values = [None, None, None]
1844 1844 return lock_values
1845 1845
1846 1846 @locked.setter
1847 1847 def locked(self, val):
1848 1848 if val and isinstance(val, (list, tuple)):
1849 1849 self._locked = ':'.join(map(str, val))
1850 1850 else:
1851 1851 self._locked = None
1852 1852
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """Deserialize the cached changeset JSON blob for a repository.

        Falls back to an EmptyCommit-shaped dict when the raw value is
        missing or cannot be parsed.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # json round-trip acts as a cheap deep copy with JSON-safe types
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
1868 1868
1869 1869 @hybrid_property
1870 1870 def changeset_cache(self):
1871 1871 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1872 1872
1873 1873 @changeset_cache.setter
1874 1874 def changeset_cache(self, val):
1875 1875 try:
1876 1876 self._changeset_cache = json.dumps(val)
1877 1877 except Exception:
1878 1878 log.error(traceback.format_exc())
1879 1879
    @hybrid_property
    def repo_name(self):
        # public accessor for the backing `repo_name` column
        return self._repo_name
1883 1883
    @repo_name.setter
    def repo_name(self, value):
        self._repo_name = value
        # keep the unique sha1 hash column in sync with the name
        self.repo_name_hash = sha1(safe_bytes(value))
1888 1888
1889 1889 @classmethod
1890 1890 def normalize_repo_name(cls, repo_name):
1891 1891 """
1892 1892 Normalizes os specific repo_name to the format internally stored inside
1893 1893 database using URL_SEP
1894 1894
1895 1895 :param cls:
1896 1896 :param repo_name:
1897 1897 """
1898 1898 return cls.NAME_SEP.join(repo_name.split(os.sep))
1899 1899
1900 1900 @classmethod
1901 1901 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1902 1902 session = Session()
1903 1903 q = session.query(cls).filter(cls.repo_name == repo_name)
1904 1904
1905 1905 if cache:
1906 1906 if identity_cache:
1907 1907 val = cls.identity_cache(session, 'repo_name', repo_name)
1908 1908 if val:
1909 1909 return val
1910 1910 else:
1911 1911 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1912 1912 q = q.options(
1913 1913 FromCache("sql_cache_short", cache_key))
1914 1914
1915 1915 return q.scalar()
1916 1916
1917 1917 @classmethod
1918 1918 def get_by_id_or_repo_name(cls, repoid):
1919 1919 if isinstance(repoid, int):
1920 1920 try:
1921 1921 repo = cls.get(repoid)
1922 1922 except ValueError:
1923 1923 repo = None
1924 1924 else:
1925 1925 repo = cls.get_by_repo_name(repoid)
1926 1926 return repo
1927 1927
1928 1928 @classmethod
1929 1929 def get_by_full_path(cls, repo_full_path):
1930 1930 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1931 1931 repo_name = cls.normalize_repo_name(repo_name)
1932 1932 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1933 1933
1934 1934 @classmethod
1935 1935 def get_repo_forks(cls, repo_id):
1936 1936 return cls.query().filter(Repository.fork_id == repo_id)
1937 1937
1938 1938 @classmethod
1939 1939 def base_path(cls):
1940 1940 """
1941 1941 Returns base path when all repos are stored
1942 1942
1943 1943 :param cls:
1944 1944 """
1945 1945 from rhodecode.lib.utils import get_rhodecode_base_path
1946 1946 return get_rhodecode_base_path()
1947 1947
1948 1948 @classmethod
1949 1949 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1950 1950 case_insensitive=True, archived=False):
1951 1951 q = Repository.query()
1952 1952
1953 1953 if not archived:
1954 1954 q = q.filter(Repository.archived.isnot(true()))
1955 1955
1956 1956 if not isinstance(user_id, Optional):
1957 1957 q = q.filter(Repository.user_id == user_id)
1958 1958
1959 1959 if not isinstance(group_id, Optional):
1960 1960 q = q.filter(Repository.group_id == group_id)
1961 1961
1962 1962 if case_insensitive:
1963 1963 q = q.order_by(func.lower(Repository.repo_name))
1964 1964 else:
1965 1965 q = q.order_by(Repository.repo_name)
1966 1966
1967 1967 return q.all()
1968 1968
1969 1969 @property
1970 1970 def repo_uid(self):
1971 1971 return '_{}'.format(self.repo_id)
1972 1972
1973 1973 @property
1974 1974 def forks(self):
1975 1975 """
1976 1976 Return forks of this repo
1977 1977 """
1978 1978 return Repository.get_repo_forks(self.repo_id)
1979 1979
1980 1980 @property
1981 1981 def parent(self):
1982 1982 """
1983 1983 Returns fork parent
1984 1984 """
1985 1985 return self.fork
1986 1986
1987 1987 @property
1988 1988 def just_name(self):
1989 1989 return self.repo_name.split(self.NAME_SEP)[-1]
1990 1990
1991 1991 @property
1992 1992 def groups_with_parents(self):
1993 1993 groups = []
1994 1994 if self.group is None:
1995 1995 return groups
1996 1996
1997 1997 cur_gr = self.group
1998 1998 groups.insert(0, cur_gr)
1999 1999 while 1:
2000 2000 gr = getattr(cur_gr, 'parent_group', None)
2001 2001 cur_gr = cur_gr.parent_group
2002 2002 if gr is None:
2003 2003 break
2004 2004 groups.insert(0, gr)
2005 2005
2006 2006 return groups
2007 2007
2008 2008 @property
2009 2009 def groups_and_repo(self):
2010 2010 return self.groups_with_parents, self
2011 2011
2012 2012 @LazyProperty
2013 2013 def repo_path(self):
2014 2014 """
2015 2015 Returns base full path for that repository means where it actually
2016 2016 exists on a filesystem
2017 2017 """
2018 2018 q = Session().query(RhodeCodeUi).filter(
2019 2019 RhodeCodeUi.ui_key == self.NAME_SEP)
2020 2020 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2021 2021 return q.one().ui_value
2022 2022
2023 2023 @property
2024 2024 def repo_full_path(self):
2025 2025 p = [self.repo_path]
2026 2026 # we need to split the name by / since this is how we store the
2027 2027 # names in the database, but that eventually needs to be converted
2028 2028 # into a valid system path
2029 2029 p += self.repo_name.split(self.NAME_SEP)
2030 2030 return os.path.join(*map(safe_str, p))
2031 2031
2032 2032 @property
2033 2033 def cache_keys(self):
2034 2034 """
2035 2035 Returns associated cache keys for that repo
2036 2036 """
2037 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2038 repo_id=self.repo_id)
2037 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2039 2038 return CacheKey.query()\
2040 .filter(CacheKey.cache_args == invalidation_namespace)\
2039 .filter(CacheKey.cache_key == repo_namespace_key)\
2041 2040 .order_by(CacheKey.cache_key)\
2042 2041 .all()
2043 2042
2044 2043 @property
2045 2044 def cached_diffs_relative_dir(self):
2046 2045 """
2047 2046 Return a relative to the repository store path of cached diffs
2048 2047 used for safe display for users, who shouldn't know the absolute store
2049 2048 path
2050 2049 """
2051 2050 return os.path.join(
2052 2051 os.path.dirname(self.repo_name),
2053 2052 self.cached_diffs_dir.split(os.path.sep)[-1])
2054 2053
2055 2054 @property
2056 2055 def cached_diffs_dir(self):
2057 2056 path = self.repo_full_path
2058 2057 return os.path.join(
2059 2058 os.path.dirname(path),
2060 2059 f'.__shadow_diff_cache_repo_{self.repo_id}')
2061 2060
2062 2061 def cached_diffs(self):
2063 2062 diff_cache_dir = self.cached_diffs_dir
2064 2063 if os.path.isdir(diff_cache_dir):
2065 2064 return os.listdir(diff_cache_dir)
2066 2065 return []
2067 2066
2068 2067 def shadow_repos(self):
2069 2068 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2070 2069 return [
2071 2070 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2072 2071 if x.startswith(shadow_repos_pattern)
2073 2072 ]
2074 2073
    def get_new_name(self, repo_name):
        """
        Returns the new full repository name based on the assigned group and
        the given (possibly new) short name.

        :param repo_name: new short name of the repository
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return self.NAME_SEP.join(path_prefix + [repo_name])
2083 2082
2084 2083 @property
2085 2084 def _config(self):
2086 2085 """
2087 2086 Returns db based config object.
2088 2087 """
2089 2088 from rhodecode.lib.utils import make_db_config
2090 2089 return make_db_config(clear_session=False, repo=self)
2091 2090
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories

        :param with_admins: include rows for super-admin users
        :param with_owner: include a row for the repository owner
        :param expand_from_user_groups: also include individual member rows
            expanded from user-group permissions
        """
        _admin_perm = 'repository.admin'

        # NOTE(review): later code indexes owner_row[0]; that assumes
        # with_owner=True whenever with_admins=True or user perms exist —
        # with_owner=False would raise IndexError. Confirm callers.
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2158 2157
2159 2158 def permission_user_groups(self, with_members=True):
2160 2159 q = UserGroupRepoToPerm.query()\
2161 2160 .filter(UserGroupRepoToPerm.repository == self)
2162 2161 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2163 2162 joinedload(UserGroupRepoToPerm.users_group),
2164 2163 joinedload(UserGroupRepoToPerm.permission),)
2165 2164
2166 2165 perm_rows = []
2167 2166 for _user_group in q.all():
2168 2167 entry = AttributeDict(_user_group.users_group.get_dict())
2169 2168 entry.permission = _user_group.permission.permission_name
2170 2169 if with_members:
2171 2170 entry.members = [x.user.get_dict()
2172 2171 for x in _user_group.users_group.members]
2173 2172 perm_rows.append(entry)
2174 2173
2175 2174 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2176 2175 return perm_rows
2177 2176
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # locked triple is (user_id, lock_time, lock_reason) — see `locked`
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose custom extra fields as additional top-level keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2226 2225
2227 2226 @classmethod
2228 2227 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2229 2228 if not lock_time:
2230 2229 lock_time = time.time()
2231 2230 if not lock_reason:
2232 2231 lock_reason = cls.LOCK_AUTOMATIC
2233 2232 repo.locked = [user_id, lock_time, lock_reason]
2234 2233 Session().add(repo)
2235 2234 Session().commit()
2236 2235
2237 2236 @classmethod
2238 2237 def unlock(cls, repo):
2239 2238 repo.locked = None
2240 2239 Session().add(repo)
2241 2240 Session().commit()
2242 2241
2243 2242 @classmethod
2244 2243 def getlock(cls, repo):
2245 2244 return repo.locked
2246 2245
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'; anything else raises ValueError
        :param user_id: id of the user performing the action
        :param only_when_enabled: when True, only act if repo locking is enabled
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        # (user_id, lock_time, lock_reason) — see the `locked` property
        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    # NOTE(review): this logs the *acting* user, not the lock
                    # holder — presumably intentional; confirm.
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2309 2308
2310 2309 @property
2311 2310 def last_commit_cache_update_diff(self):
2312 2311 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2313 2312
2314 2313 @classmethod
2315 2314 def _load_commit_change(cls, last_commit_cache):
2316 2315 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2317 2316 empty_date = datetime.datetime.fromtimestamp(0)
2318 2317 date_latest = last_commit_cache.get('date', empty_date)
2319 2318 try:
2320 2319 return parse_datetime(date_latest)
2321 2320 except Exception:
2322 2321 return empty_date
2323 2322
2324 2323 @property
2325 2324 def last_commit_change(self):
2326 2325 return self._load_commit_change(self.changeset_cache)
2327 2326
2328 2327 @property
2329 2328 def last_db_change(self):
2330 2329 return self.updated_on
2331 2330
2332 2331 @property
2333 2332 def clone_uri_hidden(self):
2334 2333 clone_uri = self.clone_uri
2335 2334 if clone_uri:
2336 2335 import urlobject
2337 2336 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2338 2337 if url_obj.password:
2339 2338 clone_uri = url_obj.with_password('*****')
2340 2339 return clone_uri
2341 2340
2342 2341 @property
2343 2342 def push_uri_hidden(self):
2344 2343 push_uri = self.push_uri
2345 2344 if push_uri:
2346 2345 import urlobject
2347 2346 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2348 2347 if url_obj.password:
2349 2348 push_uri = url_obj.with_password('*****')
2350 2349 return push_uri
2351 2350
    def clone_url(self, **override):
        """
        Build the clone URL for this repository.

        Recognized override keys (consumed, not forwarded):
        ``with_id`` — use the id-based URI template;
        ``uri_tmpl`` — explicit URI template;
        ``ssh`` — use the SSH template when no explicit template is given.
        Any remaining overrides are forwarded to :func:`get_clone_url`.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer the per-request config when available, else load settings
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
2391 2390
2392 2391 def set_state(self, state):
2393 2392 self.repo_state = state
2394 2393 Session().add(self)
2395 2394 #==========================================================================
2396 2395 # SCM PROPERTIES
2397 2396 #==========================================================================
2398 2397
2399 2398 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2400 2399 return get_commit_safe(
2401 2400 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2402 2401 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2403 2402
2404 2403 def get_changeset(self, rev=None, pre_load=None):
2405 2404 warnings.warn("Use get_commit", DeprecationWarning)
2406 2405 commit_id = None
2407 2406 commit_idx = None
2408 2407 if isinstance(rev, str):
2409 2408 commit_id = rev
2410 2409 else:
2411 2410 commit_idx = rev
2412 2411 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2413 2412 pre_load=pre_load)
2414 2413
2415 2414 def get_landing_commit(self):
2416 2415 """
2417 2416 Returns landing commit, or if that doesn't exist returns the tip
2418 2417 """
2419 2418 _rev_type, _rev = self.landing_rev
2420 2419 commit = self.get_commit(_rev)
2421 2420 if isinstance(commit, EmptyCommit):
2422 2421 return self.get_commit()
2423 2422 return commit
2424 2423
    def flush_commit_cache(self):
        # Invalidate the stored commit cache by writing a bogus raw_id first;
        # the follow-up update_commit_cache() then detects the cache as
        # outdated and recomputes it from the scm instance.
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
2428 2427
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last commit for repository
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author
            updated_on

        :param cs_cache: pre-computed commit cache (dict or BaseCommit);
            when None, it is read from the scm instance without caching
        :param config: optional config forwarded to scm_instance()
        """
        from rhodecode.lib.vcs.backends.base import BaseCommit
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        repo_commit_count = 0

        # NOTE(review): `empty` is only assigned inside this branch; if a
        # caller passes cs_cache explicitly, the later `not empty` / `if empty:`
        # references could raise NameError (currently shielded by the
        # always-false comparison below) — confirm before changing.
        if cs_cache is None:
            # use no-cache version here
            try:
                scm_repo = self.scm_instance(cache=False, config=config)
            except VCSError:
                scm_repo = None
            empty = scm_repo is None or scm_repo.is_empty()

            if not empty:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents", "branch"])
                repo_commit_count = scm_repo.count()
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseCommit):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # outdated when either the commit id or the revision changed
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                    new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _current_datetime = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _current_datetime
            # we check if last update is newer than the new value
            # if yes, we use the current timestamp instead. Imagine you get
            # old commit pushed 1y ago, we'd set last update 1y to ago.
            last_change_timestamp = datetime_to_time(last_change)
            # NOTE(review): both timestamps derive from `last_change`, so this
            # comparison is always False and the clamp below never fires —
            # presumably `current_timestamp` was meant to come from
            # `_current_datetime`; confirm intent before fixing.
            current_timestamp = datetime_to_time(last_change)
            if last_change_timestamp > current_timestamp and not empty:
                cs_cache['date'] = _current_datetime

            # also store size of repo
            cs_cache['repo_commit_count'] = repo_commit_count

            _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = last_change
            Session().add(self)
            Session().commit()

        else:
            if empty:
                cs_cache = EmptyCommit().__json__()
            else:
                cs_cache = self.changeset_cache

            _date_latest = parse_datetime(cs_cache.get('date') or empty_date)

            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = _date_latest
            Session().add(self)
            Session().commit()

        log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
                  self.repo_name, cs_cache, _date_latest)
2512 2511
2513 2512 @property
2514 2513 def tip(self):
2515 2514 return self.get_commit('tip')
2516 2515
2517 2516 @property
2518 2517 def author(self):
2519 2518 return self.tip.author
2520 2519
2521 2520 @property
2522 2521 def last_change(self):
2523 2522 return self.scm_instance().last_change
2524 2523
2525 2524 def get_comments(self, revisions=None):
2526 2525 """
2527 2526 Returns comments for this repository grouped by revisions
2528 2527
2529 2528 :param revisions: filter query by revisions only
2530 2529 """
2531 2530 cmts = ChangesetComment.query()\
2532 2531 .filter(ChangesetComment.repo == self)
2533 2532 if revisions:
2534 2533 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2535 2534 grouped = collections.defaultdict(list)
2536 2535 for cmt in cmts.all():
2537 2536 grouped[cmt.revision].append(cmt)
2538 2537 return grouped
2539 2538
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :returns: dict mapping revision -> [status, status_label, pr_id, pr_repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in range(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # NOTE(review): `stat` is re-bound here from status constant to a
        # ChangesetStatus row — works, but the shadowing is easy to misread.
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            # explicit statuses override the under-review placeholder above
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2581 2580
2582 2581 # ==========================================================================
2583 2582 # SCM CACHE INSTANCE
2584 2583 # ==========================================================================
2585 2584
    def scm_instance(self, **kwargs):
        """
        Return a vcs backend instance for this repository.

        Recognized kwargs: ``config`` (bypasses the long-term cache),
        ``cache`` (vcs.remote cache flag sent to the vcsserver),
        ``vcs_full_cache`` (overrides the global `vcs_full_cache` setting).
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows override global config
            full_cache = vcs_full_cache
        else:
            full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
2607 2606
    def _get_instance_cached(self):
        """
        Memoized variant of _get_instance() backed by the `cache_repo_longterm`
        region; uses an InvalidationContext keyed on this repo's invalidation
        namespace to detect state changes and refresh stale entries.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = f'repo_instance.{self.repo_id}'
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs its own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
        inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)

        # our wrapped caching function that takes state_uid to save the previous state in
        def cache_generator(_state_uid):

            @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
            def get_instance_cached(_repo_id, _process_context_id):
                # we save in cached func the generation state so we can detect a change and invalidate caches
                return _state_uid, self._get_instance(repo_state_uid=_state_uid)

            return get_instance_cached

        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.state_uid
            cache_func = cache_generator(cache_state_uid)

            args = self.repo_id, inv_context_manager.proc_key

            previous_state_uid, instance = cache_func(*args)

            if instance:
                # now compare keys, the "cache" state vs expected state.
                if previous_state_uid != cache_state_uid:
                    log.warning('Cached state uid %s is different than current state uid %s',
                                previous_state_uid, cache_state_uid)
                    _, instance = cache_func.refresh(*args)

        log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
        return instance
2638 2646
    def _get_instance(self, cache=True, config=None, repo_state_uid=None):
        """
        Build a (non-memoized) vcs backend instance for this repository.

        :param cache: controls the vcs.remote cache flag sent over the wire
        :param config: optional config override; defaults to the DB config
        :param repo_state_uid: state uid forwarded to the vcsserver
        """
        log.debug('Initializing %s instance `%s` with cache flag set to: %s',
                  self.repo_type, self.repo_path, cache)
        config = config or self._config
        custom_wire = {
            'cache': cache,  # controls the vcs.remote cache
            'repo_state_uid': repo_state_uid
        }
        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)
        if repo is not None:
            repo.count()  # cache rebuild
        return repo
2656 2665
2657 2666 def get_shadow_repository_path(self, workspace_id):
2658 2667 from rhodecode.lib.vcs.backends.base import BaseRepository
2659 2668 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2660 2669 self.repo_full_path, self.repo_id, workspace_id)
2661 2670 return shadow_repo_path
2662 2671
2663 2672 def __json__(self):
2664 2673 return {'landing_rev': self.landing_rev}
2665 2674
2666 2675 def get_dict(self):
2667 2676
2668 2677 # Since we transformed `repo_name` to a hybrid property, we need to
2669 2678 # keep compatibility with the code which uses `repo_name` field.
2670 2679
2671 2680 result = super(Repository, self).get_dict()
2672 2681 result['repo_name'] = result.pop('_repo_name', None)
2673 2682 result.pop('_changeset_cache', '')
2674 2683 return result
2675 2684
2676 2685
class RepoGroup(Base, BaseModel):
    """
    Hierarchical repository group ("folder") model.

    Groups nest through the ``group_parent_id`` self-referencing foreign key
    (exposed as ``parent_group``); ``group_name`` stores the full
    slash-separated path of the group.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full slash-separated group path; exposed via the `group_name` hybrid below
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    # normalized digest of the name, kept in sync by the `group_name` setter
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User', back_populates='repository_groups')
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"

    @hybrid_property
    def group_name(self):
        # hybrid over `_group_name`; the setter also maintains `group_name_hash`
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Decode the raw JSON ``changeset_cache`` blob.

        Falls back to an EmptyCommit-shaped dict when the blob is missing or
        cannot be decoded; decode failures other than TypeError are logged.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through json so the result matches the shape of a
            # value decoded from storage
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        # serialization failure leaves the stored blob untouched (logged only)
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # raise would make this self-parent check unconditional.
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template output
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        """
        Normalized ascii-only digest of a group name: formatting removed,
        lowercased, with every non-letter replaced by its ordinal value.
        """
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)

    @classmethod
    def _generate_choice(cls, repo_group):
        # single (id, label) select2 choice tuple for one group
        from webhelpers2.html import literal as _literal

        def _name(k):
            return _literal(cls.CHOICES_SEPARATOR.join(k))

        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Build select2 choices for the given groups (all groups by default),
        sorted by top-level path segment; optionally prefixed with a
        '-- No parent --' entry keyed as -1.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, '-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        # separator used in group paths/URLs
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """
        Fetch a group by its full name, optionally case-insensitively and/or
        through the short SQL cache region.
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the oldest personal group owned by the user, or None for the
        default (anonymous) user.
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        List repo groups, optionally filtered by owner and/or parent group,
        ordered by name (case-insensitively by default).
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit=10):
        """All ancestor groups, outermost first."""
        # NOTE(review): extra parameters on a @property cannot be passed by
        # callers, so the limit of 10 always applies.
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        # seconds since the commit cache was last refreshed
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        """Parse the cached commit date; epoch-zero on missing/bad data."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        # direct child groups only (query, not list)
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # last path segment of the full group name
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # direct member repositories (query, not list)
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Number of repositories in this group and all nested subgroups."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        """Depth-first collection of nested groups and/or repositories."""
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        # the receiver itself is included only when groups are requested
        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # direct repositories and direct child groups only; each child
            # group contributes its own already-aggregated changeset_cache
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        # pick the member with the newest cached commit date
        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        # NOTE(review): with no members, `empty_date` (a datetime) is handed
        # to parse_datetime — presumably accepted; confirm against its impl.
        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups

        Returns rows (AttributeDicts) for the owner, super-admins, directly
        granted users and — optionally — members of permitted user groups.
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        # NOTE(review): the owner_row[0] accesses below assume with_owner=True;
        # with_owner=False together with with_admins=True would raise IndexError.
        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """
        User groups granted a permission on this repo group, as
        AttributeDict rows; member dicts are attached when requested.
        """
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        result.pop('_changeset_cache', '')
        return result
3127 3136
class Permission(Base, BaseModel):
    """
    Permission catalogue plus query helpers resolving a user's default
    permissions, both granted directly and inherited via user groups.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # all known permission keys with their translated descriptions;
    # used to seed/validate the permissions table
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __repr__(self):
        return "<%s('%s:%s')>" % (
            self.cls_name, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Fetch a Permission row by its `permission_name` key (or None)."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """Direct per-repository permissions of a user, optionally one repo."""
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """Direct branch-permission rules of a user, ordered by rule order."""
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """Repository permissions inherited through active user groups."""
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """Branch-permission rules inherited through active user groups."""
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """Direct repo-group permissions of a user, optionally one group."""
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """Repo-group permissions inherited through active user groups."""
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """Direct user-group permissions of a user, optionally one group."""
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """User-group permissions inherited through membership of another
        (source) user group; the target group is aliased to disambiguate."""
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3409 3418
class UserRepoToPerm(Base, BaseModel):
    """Association row: a single user's permission on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates="repo_to_perm")
    repository = relationship('Repository', back_populates="repo_to_perm")
    permission = relationship('Permission')

    # branch rules hanging off this grant; removed together with it
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')

    @classmethod
    def create(cls, user, repository, permission):
        """Build a grant row and add it to the session (caller commits)."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.repository} >'
3439 3448
3440 3449
class UserUserGroupToPerm(Base, BaseModel):
    """Association row: a single user's permission on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_group_to_perm')
    user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Build a grant row and add it to the session (caller commits)."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.user_group} >'
3468 3477
3469 3478
class UserToPerm(Base, BaseModel):
    """Association row: a global (system-wide) permission held by a user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_perms')
    # eagerly joined: permission is read whenever the row is
    permission = relationship('Permission', lazy='joined')

    def __repr__(self):
        return f'<{self.user} => {self.permission} >'
3486 3495
3487 3496
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Association row: a permission granted to a user group on a repository.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        # a given permission can be granted only once per (repo, user group)
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
    permission = relationship('Permission')
    repository = relationship('Repository', back_populates='users_group_to_perm')
    # branch-level permission refinements hang off this repo-level grant
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')

    @classmethod
    def create(cls, users_group, repository, permission):
        """
        Grant *permission* on *repository* to *users_group*.

        Adds the new row to the session; the caller is responsible for commit.
        """
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
3516 3525
3517 3526
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Association row: a permission granted to one user group on another
    (target) user group.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot hold a permission on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both relationships point at UserGroup, so explicit primaryjoins are required
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """
        Grant *permission* on *target_user_group* to *user_group*.

        Adds the new row to the session; the caller is responsible for commit.
        """
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
3546 3555
3547 3556
class UserGroupToPerm(Base, BaseModel):
    """
    Association row: a global (non-scoped) permission held by a user group.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        # each permission can be assigned to a user group only once
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_to_perm')
    permission = relationship('Permission')
3561 3570
3562 3571
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Association row: a permission granted to a user on a repository group.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        # a given permission can be granted only once per (user, repo group)
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='repo_group_to_perm')
    group = relationship('RepoGroup', back_populates='repo_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """
        Grant *permission* on *repository_group* to *user*.

        Adds the new row to the session; the caller is responsible for commit.
        """
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3587 3596
3588 3597
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Association row: a permission granted to a user group on a repository group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *ToPerm tables, permission_id is not
        # part of the unique key, so only one permission row may exist per
        # (user group, repo group) pair — confirm this asymmetry is intentional.
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
    permission = relationship('Permission')
    group = relationship('RepoGroup', back_populates='users_group_to_perm')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """
        Grant *permission* on *repository_group* to *user_group*.

        Adds the new row to the session; the caller is responsible for commit.
        """
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        # f-string for consistency with the other *ToPerm __repr__ implementations
        return f'<UserGroupRepoGroupToPerm:{self.users_group} => {self.group} >'
3616 3625
3617 3626
class Statistics(Base, BaseModel):
    """
    Cached per-repository statistics; one row per repository
    (repository_id is unique).
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True, viewonly=True)
3632 3641
3633 3642
class UserFollowing(Base, BaseModel):
    """
    A user following either a repository or another user; exactly one of
    ``follows_repo_id`` / ``follows_user_id`` is expected to be set
    (both columns are nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        # a user can follow a given repo or user only once
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # two FKs reference users.user_id, so explicit primaryjoins are required
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all followings targeting the given repository."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3656 3665
3657 3666
class CacheKey(Base, BaseModel):
    """
    Tracks validity of dogpile cache entries. Invalidation rotates
    ``cache_state_uid`` (rather than deleting rows), so every worker
    sharing the row observes the changed state uid and treats its cached
    data as stale.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        Index('cache_args_idx', 'cache_args'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = cache_active
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args; yields (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5) when *based_on* is
        given, random (uuid4) otherwise.
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: matched against ``cache_key``
        :param delete: drop the matching rows instead of rotating their
            state uid
        """
        try:
            qry = Session().query(cls).filter(cls.cache_key == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # rotate the shared state uid; workers comparing against it
                # will see a mismatch and recompute their cached data
                new_uid = cls.generate_new_state_uid()
                qry.update({"cache_state_uid": new_uid,
                            "cache_args": f"repo_state:{time.time()}"})
                log.debug('cache object %s set new UID %s',
                          safe_str(cache_uid), new_uid)

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for *cache_key*, or None when missing."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: CacheKey} for all rows whose cache_args == namespace."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3765 3774
3766 3775
class ChangesetComment(Base, BaseModel):
    """
    A comment made either on a single commit (``revision`` set) or on a
    pull request (``pull_request_id`` set). Inline comments additionally
    carry ``f_path`` and ``line_no``.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated for newer PR versions
    COMMENT_OUTDATED = 'comment_outdated'
    COMMENT_TYPE_NOTE = 'note'
    COMMENT_TYPE_TODO = 'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values
    OP_IMMUTABLE = 'immutable'
    OP_CHANGEABLE = 'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential: a TODO comment may be resolved by another comment
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select', back_populates='user_comments')
    repo = relationship('Repository', back_populates='comments')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
    pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User).join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
        """
        Return the 1-based index of *pr_version* within the PR's version
        list, or 0 when the version is unknown/not found.
        """
        if pr_version is None:
            return 0

        if versions is not None:
            num_versions = [x.pull_request_version_id for x in versions]

        num_versions = num_versions or []
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return 0

    @property
    def outdated(self):
        """True when the comment is flagged as outdated."""
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        """JSON-serialized variant of :attr:`outdated` for templates."""
        return str_json(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        """True when the comment may no longer be changed."""
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version: int) -> bool:
        """
        Checks if comment is outdated for given pull request version
        """

        def version_check():
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return str_json(self.outdated_at_version(version))

    def older_than_version(self, version: int) -> bool:
        """
        Checks if comment is made from a previous version than given.
        Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
        """

        # If version is None, return False as the current version cannot be less than None
        if version is None:
            return False

        # Ensure that the version is an integer to prevent TypeError on comparison
        if not isinstance(version, int):
            raise ValueError("The provided version must be an integer.")

        # Initialize current version to 0 or pull_request_version_id if it's available
        cur_ver = 0
        if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
            cur_ver = self.pull_request_version.pull_request_version_id

        # Return True if the current version is less than the given version
        return cur_ver < version

    def older_than_version_js(self, version):
        """
        Checks if comment is made from previous version than given
        """
        return str_json(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        """The comment resolving this one, or None."""
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        """True for TODO-type comments."""
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        """True when the comment is attached to a specific file line."""
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        """Highest edit-history version number; 0 if never edited."""
        version = 0
        if self.history:
            # history is ordered by version, so the last entry is the newest
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        """Index of this comment's PR version within *versions* (see get_index_from_version)."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        """Status string of the attached status change, if any."""
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        """Human-readable label of the attached status change, if any."""
        if self.status_change:
            return self.status_change[0].status_lbl

    def __repr__(self):
        if self.comment_id:
            return f'<DB:Comment #{self.comment_id}>'
        else:
            return f'<DB:Comment at {id(self)!r}>'

    def get_api_data(self):
        """Return the comment serialized as a plain dict for API responses."""
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3971 3980
3972 3981
class ChangesetCommentHistory(Base, BaseModel):
    """
    Snapshot of a previous text of an edited :class:`ChangesetComment`,
    one row per edit, numbered by ``version``.
    """
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next version number to use for an edit of *comment_id*:
        1 when no history exists yet, otherwise one past the larger of the
        row count and the highest stored version.
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        # evaluate the count once; the original issued up to three COUNT queries
        entry_count = q.count()
        if entry_count == 0:
            return 1
        latest_version = q[0].version
        if entry_count >= latest_version:
            return entry_count + 1
        return latest_version + 1
4001 4010
4002 4011
class ChangesetStatus(Base, BaseModel):
    """
    A review status (approved/rejected/...) set by a user on a commit or a
    pull request. ``version`` distinguishes statuses recorded across
    pull request updates (unique per repo/revision/version).
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    # possible .status values; STATUS_NOT_REVIEWED is the default
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs used for display
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
    pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')

    def __repr__(self):
        return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"

    @classmethod
    def get_status_lbl(cls, value):
        """Translated label for a raw status *value*, or None if unknown."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        """Translated label for this row's status."""
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return the status serialized as a plain dict for API responses."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
4062 4071
4063 4072
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge.

    On entry the pull request is switched to ``pr_state``; on a clean exit
    the previous state (or ``back_state``) is restored. If the body raises,
    the state is intentionally left as-is and the exception propagates.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        # state to restore on exit; defaults to the PR's current state
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        failed = exc_val is not None or exc_type is not None
        if failed:
            # leave the PR in the transitional state and re-raise the error
            log.error(traceback.format_tb(exc_tb))
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        """State most recently persisted via this context manager."""
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the pull request, committing immediately."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
        else:
            self._current_state = pr_state
4104 4113
4105 4114
4106 4115 class _PullRequestBase(BaseModel):
4107 4116 """
4108 4117 Common attributes of pull request and version entries.
4109 4118 """
4110 4119
4111 4120 # .status values
4112 4121 STATUS_NEW = 'new'
4113 4122 STATUS_OPEN = 'open'
4114 4123 STATUS_CLOSED = 'closed'
4115 4124
4116 4125 # available states
4117 4126 STATE_CREATING = 'creating'
4118 4127 STATE_UPDATING = 'updating'
4119 4128 STATE_MERGING = 'merging'
4120 4129 STATE_CREATED = 'created'
4121 4130
4122 4131 title = Column('title', Unicode(255), nullable=True)
4123 4132 description = Column(
4124 4133 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4125 4134 nullable=True)
4126 4135 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4127 4136
4128 4137 # new/open/closed status of pull request (not approve/reject/etc)
4129 4138 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4130 4139 created_on = Column(
4131 4140 'created_on', DateTime(timezone=False), nullable=False,
4132 4141 default=datetime.datetime.now)
4133 4142 updated_on = Column(
4134 4143 'updated_on', DateTime(timezone=False), nullable=False,
4135 4144 default=datetime.datetime.now)
4136 4145
4137 4146 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4138 4147
4139 4148 @declared_attr
4140 4149 def user_id(cls):
4141 4150 return Column(
4142 4151 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4143 4152 unique=None)
4144 4153
4145 4154 # 500 revisions max
4146 4155 _revisions = Column(
4147 4156 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4148 4157
4149 4158 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4150 4159
4151 4160 @declared_attr
4152 4161 def source_repo_id(cls):
4153 4162 # TODO: dan: rename column to source_repo_id
4154 4163 return Column(
4155 4164 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4156 4165 nullable=False)
4157 4166
4158 4167 @declared_attr
4159 4168 def pr_source(cls):
4160 4169 return relationship(
4161 4170 'Repository',
4162 4171 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4163 4172 overlaps="pull_requests_source"
4164 4173 )
4165 4174
4166 4175 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4167 4176
4168 4177 @hybrid_property
4169 4178 def source_ref(self):
4170 4179 return self._source_ref
4171 4180
4172 4181 @source_ref.setter
4173 4182 def source_ref(self, val):
4174 4183 parts = (val or '').split(':')
4175 4184 if len(parts) != 3:
4176 4185 raise ValueError(
4177 4186 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4178 4187 self._source_ref = safe_str(val)
4179 4188
4180 4189 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4181 4190
4182 4191 @hybrid_property
4183 4192 def target_ref(self):
4184 4193 return self._target_ref
4185 4194
4186 4195 @target_ref.setter
4187 4196 def target_ref(self, val):
4188 4197 parts = (val or '').split(':')
4189 4198 if len(parts) != 3:
4190 4199 raise ValueError(
4191 4200 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4192 4201 self._target_ref = safe_str(val)
4193 4202
    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        # NOTE: python attribute is already `target_repo_id`; only the DB
        # column keeps the legacy 'other_repo_id' name.
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_target(cls):
        # Relationship to the target Repository; `overlaps` silences the
        # SQLAlchemy warning about sharing this FK with `target_repo`.
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
            overlaps="pull_requests_target"
        )
4208 4217
    # Serialized reference of the shadow-merge commit (see `shadow_merge_ref`).
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    # Cached merge-status code; read through the `last_merge_status` hybrid.
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    # JSON metadata captured by the last merge check; parsed via
    # `last_merge_metadata_parsed`.
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    # Commit id produced by an actual merge of this pull request.
    merge_rev = Column('merge_rev', String(40), nullable=True)

    # JSON blob describing reviewer rules/settings for this PR.
    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4227 4236
    @property
    def reviewer_data_json(self):
        """Reviewer data passed through the `str_json` serialization helper."""
        return str_json(self.reviewer_data)
4231 4240
4232 4241 @property
4233 4242 def last_merge_metadata_parsed(self):
4234 4243 metadata = {}
4235 4244 if not self.last_merge_metadata:
4236 4245 return metadata
4237 4246
4238 4247 if hasattr(self.last_merge_metadata, 'de_coerce'):
4239 4248 for k, v in self.last_merge_metadata.de_coerce().items():
4240 4249 if k in ['target_ref', 'source_ref']:
4241 4250 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4242 4251 else:
4243 4252 if hasattr(v, 'de_coerce'):
4244 4253 metadata[k] = v.de_coerce()
4245 4254 else:
4246 4255 metadata[k] = v
4247 4256 return metadata
4248 4257
4249 4258 @property
4250 4259 def work_in_progress(self):
4251 4260 """checks if pull request is work in progress by checking the title"""
4252 4261 title = self.title.upper()
4253 4262 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4254 4263 return True
4255 4264 return False
4256 4265
4257 4266 @property
4258 4267 def title_safe(self):
4259 4268 return self.title\
4260 4269 .replace('{', '{{')\
4261 4270 .replace('}', '}}')
4262 4271
    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        """List of commit ids parsed from the colon-separated storage column."""
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        # Persist as a single colon-joined string.
        self._revisions = ':'.join(val)
4275 4284
    @hybrid_property
    def last_merge_status(self):
        """Last merge status as an int, or None when never computed."""
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val
4283 4292
    @declared_attr
    def author(cls):
        # The User who opened the pull request; eagerly joined.
        return relationship(
            'User', lazy='joined',
            #TODO, problem that is somehow :?
            #back_populates='user_pull_requests'
        )

    @declared_attr
    def source_repo(cls):
        # Source Repository; shares the FK with `pr_source` (see above).
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            #back_populates=''
        )
4299 4308
    @property
    def source_ref_parts(self):
        """Source reference parsed into a Reference (type, name, commit_id)."""
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        # Target Repository; shares the FK with `pr_target` (see above).
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id'
        )

    @property
    def target_ref_parts(self):
        """Target reference parsed into a Reference (type, name, commit_id)."""
        return self.unicode_to_reference(self.target_ref)
4314 4323
    @property
    def shadow_merge_ref(self):
        """Shadow-merge reference parsed from its stored string form."""
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        # Serialize the Reference back into its string column form.
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        # Thin delegation to the module-level parser.
        return unicode_to_reference(raw)

    @staticmethod
    def reference_to_unicode(ref):
        # Thin delegation to the module-level serializer.
        return reference_to_unicode(ref)
4330 4339
    def get_api_data(self, with_merge_state=True):
        """
        Serialize this pull request into the dict shape used by the API.

        :param with_merge_state: when True, runs the (potentially expensive)
            merge-status computation; otherwise 'not_available' is reported.
        :return: dict with PR fields, source/target repo info, merge data and
            per-reviewer status entries
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_str(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        # shadow-repo clone info for the simulated merge result
        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref.asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # first status entry wins; unreviewed PRs report a marker
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
4399 4408
    def set_state(self, pull_request_state, final_state=None):
        """
        Switch the PR into `pull_request_state` through the `_SetState`
        context manager, which moves back to another state on exit.

        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state: state to enter for the duration of the block
        :param final_state: optional state to set on exit instead of the
            initial one (passed to `_SetState` as ``back_state``)

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4413 4422
4414 4423
class PullRequest(Base, _PullRequestBase):
    """
    The "live" pull request row. Historical snapshots taken on each PR update
    are stored as :class:`PullRequestVersion` rows pointing back here.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )
    # Sentinel value used by APIs to address the newest version of a PR.
    LATEST_VER = 'latest'

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return f'<DB:PullRequest #{self.pull_request_id}>'
        else:
            # not yet persisted, no id assigned
            return f'<DB:PullRequest at {id(self)!r}>'

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around `pull_request_obj`, merging
        in a few attributes taken from `org_pull_request_obj` (the original,
        live pull request).

        :param internal_methods: names served by wrapper methods instead of
            the attrs dict; defaults to ['versions']
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # names marked internal are resolved as real attributes of
                # this wrapper; everything else is read from the attrs dict
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                pr_id = self.attrs.get('pull_request_id')
                return f'<DB:PullRequestDisplay #{pr_id}>'

            def versions(self):
                # closes over pull_request_obj from the factory method
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                # only PullRequestVersion objects carry this attribute
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        # merge state is skipped here: it is expensive and not needed for
        # pure display purposes
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # these three always reflect the original (live) pull request
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        """True when the PR status is STATUS_CLOSED."""
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        """True while the PR is in any transient (non-created) state."""
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        # aggregate review status over all reviewers
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self, user=None):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self, user=user)

    def get_pull_request_reviewers(self, role=None):
        """Reviewer rows of this PR, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    @property
    def reviewers_count(self):
        """Number of attached users with the reviewer role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()

    @property
    def observers_count(self):
        """Number of attached users with the observer role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()

    def observers(self):
        """Yield (reviewer_row, user) pairs for all observers of this PR."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        for entry in qry:
            yield entry, entry.user

    @property
    def workspace_id(self):
        # identifier of the shadow-merge workspace for this PR
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return a VCS instance of the shadow repository for this PR, or None
        when the shadow repo does not exist on disk (yet).
        """
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1

    @property
    def pull_request_last_version(self):
        return self.versions_count
4581 4590
4582 4591
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a PullRequest, created each time the PR is updated.
    State/review queries delegate to the live parent `pull_request`.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest', back_populates='versions')

    def __repr__(self):
        if self.pull_request_version_id:
            return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
        else:
            # not yet persisted, no id assigned
            return f'<DB:PullRequestVersion at {id(self)!r}>'

    @property
    def reviewers(self):
        # reviewers always belong to the live pull request
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # state is tracked on the live PR only
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()

    def observers(self):
        return self.pull_request.observers()
4622 4631
4623 4632
class PullRequestReviewers(Base, BaseModel):
    """Association of a User with a PullRequest in a reviewer/observer role."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    @hybrid_property
    def reasons(self):
        """List of textual reasons this reviewer was added (never None)."""
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        # only a list of strings is accepted; anything else is a caller bug
        val = val or []
        if any(not isinstance(x, str) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON-backed storage for the `reasons` hybrid property
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest', back_populates='reviewers')

    # JSON blob with the review-rule configuration that added this reviewer
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        """Reviewer rows for the given PR id, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4694 4703
4695 4704
class Notification(Base, BaseModel):
    """A notification message fanned out to one or more recipient users."""
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = 'cs_comment'
    TYPE_MESSAGE = 'message'
    TYPE_MENTION = 'mention'
    TYPE_REGISTRATION = 'registration'
    TYPE_PULL_REQUEST = 'pull_request'
    TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User', back_populates='user_created_notifications')
    notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')

    @property
    def recipients(self):
        """All recipient User objects, ordered by user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in `recipients`.
        Changes are staged on the session only; the caller must commit.

        :param created_by: User authoring the notification
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4754 4763
4755 4764
class UserNotification(Base, BaseModel):
    """Per-user delivery/read state for a Notification."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined", back_populates='notifications')
    notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')

    def mark_as_read(self):
        # flag as read and stage the change; the caller is expected to commit
        self.read = True
        Session().add(self)
4774 4783
4775 4784
class UserNotice(Base, BaseModel):
    """A one-off notice (info/warning/error banner) shown to a single user."""
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): the backing column is named 'gist_id' — almost certainly
    # a copy/paste left-over from the Gist model; renaming requires a DB
    # migration, so it is only flagged here.
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for `user`.

        Silently returns (creating nothing) when `notice_level` is not one of
        the known levels, or when an identical unread notice already exists
        and `allow_duplicate` is False.
        """

        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # duplicate == same user + same body, still unread
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
4833 4842
4834 4843
class Gist(Base, BaseModel):
    """A code snippet (gist) backed by a small VCS repository on disk."""
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = 'public'
    GIST_PRIVATE = 'private'
    DEFAULT_FILENAME = 'gistfile1.txt'

    ACL_LEVEL_PUBLIC = 'acl_public'
    ACL_LEVEL_PRIVATE = 'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    # short public identifier used in URLs
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User', back_populates='user_gists')

    def __repr__(self):
        return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'

    @hybrid_property
    def description_safe(self):
        """HTML-escaped gist description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Return the gist with the given access id or raise HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            log.debug('WARN: No DB entry with id %s', id_)
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        """Return the gist with the given access id, or None."""
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        # full URL of this gist, built by the model layer
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        # the root path lookup is cached; it rarely changes
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4938 4947
4939 4948
class ExternalIdentity(Base, BaseModel):
    """
    Mapping between a local User and an identity at an external auth provider
    (OAuth etc.), including the provider-issued tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default='', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default='')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
    access_token = Column('access_token', String(1024), default='')
    alt_token = Column('alt_token', String(1024), default='')
    token_secret = Column('token_secret', String(1024), default='')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load the EE auth plugin registered under `plugin_id`."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
5005 5014
5006 5015
class Integration(Base, BaseModel):
    """
    A configured integration (webhook, Slack, etc.) scoped either globally,
    to a repo group, or to a single repository.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a group: apply only to direct children, not recursively;
    # when unscoped: apply only to top-level (root) repositories
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)

    # per-integration JSON configuration blob
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='integrations')

    repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')

    @property
    def scope(self):
        """Human-readable description of what this integration applies to."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5043 5052
5044 5053
class RepoReviewRuleUser(Base, BaseModel):
    """
    A single user attached to a repo review rule, carrying a role
    (reviewer/observer) and a mandatory flag.
    """
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User', back_populates='user_review_rules')

    def rule_data(self):
        # per-user rule attributes, consumed by RepoReviewRule.review_users
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }
5066 5075
5067 5076
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    A user group attached to a repo review rule; every active member of
    the group is expanded into the rule's reviewer/observer set. Adds a
    vote_rule (minimum number of votes, -1 meaning everyone must vote).
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel: all group members must vote
    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # per-group rule attributes, consumed by RepoReviewRule.review_users
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # falsy (None/0) and the -1 sentinel both mean "all must vote"
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
5100 5109
5101 5110
class RepoReviewRule(Base, BaseModel):
    """
    Review rule for a repository: selects which reviewers/observers apply
    to a pull request whose source branch, target branch and changed files
    match the configured patterns. Patterns are globs by default, or raw
    regexes when prefixed with ``re:``.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', back_populates='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)

    # Legacy fields, just for backward compat
    _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)

    pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
    commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)

    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error for globs that translate into invalid regexes
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/None pattern means "match everything"
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    @hybrid_property
    def forbid_pr_author_to_review(self):
        return self.pr_author == 'forbid_pr_author'

    @hybrid_property
    def include_pr_author_to_review(self):
        return self.pr_author == 'include_pr_author'

    @hybrid_property
    def forbid_commit_author_to_review(self):
        return self.commit_author == 'forbid_commit_author'

    @hybrid_property
    def include_commit_author_to_review(self):
        return self.commit_author == 'include_commit_author'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        :return: True when both the branch patterns and the file pattern match
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                # `re:` prefix switches from glob to raw regex matching
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for file_data in files_changed:
                filename = file_data.get('filename')

                # one matching file is enough
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        # keyed by username; insertion order preserved so direct rule users
        # take precedence over user-group members
        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # BUGFIX: the dict is keyed by username, so membership must
                # be tested with the username (the old `rule_user.user not
                # in users` compared a User object against string keys and
                # was always True)
                if rule_user.user.username not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """
        Return the user-group rule entries of this rule that include the
        given user as a member (empty list when none apply).
        """
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5286 5295
5287 5296
class ScheduleEntry(Base, BaseModel):
    """
    Persisted definition of a scheduled (periodic) task: a named schedule
    (crontab or integer interval) plus the dotted task path, args and
    kwargs to run. ``task_uid`` is a hash over the task definition, kept
    in sync by the before_insert/before_update listeners below.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )
    SCHEDULE_TYPE_INTEGER = "integer"
    SCHEDULE_TYPE_CRONTAB = "crontab"

    schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        """Set the schedule type, restricted to ``schedule_types``."""
        if val not in self.schedule_types:
            # BUGFIX: the original message formatted the rejected value into
            # the "must be one of" slot and the (old) stored type into the
            # "got" slot; report the allowed list and the offending value
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable sha1 uid over the task definition (dotted name,
        args, kwargs) of *obj*; used to detect duplicate/changed tasks.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        # raw JSON payloads are decoded first; undecodable ones fall back
        # to empty containers
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): `sorted(dot_notation)` sorts the *characters* of the
        # dotted name; kept as-is because task_uid values are persisted
        # (unique-constrained) and must stay stable
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return sha1(safe_bytes(val))

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Return the entry with the given unique name, or None."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Return the entry with the given primary key, or None."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        """Build the runnable schedule object from the stored definition."""
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        # tolerate undecodable stored args by returning an empty list
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        # tolerate undecodable stored kwargs by returning an empty dict
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val, indent=False):
        """Render a (possibly coerced) JSON column back to its raw string."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            if indent:
                val = ext_json.formatted_str_json(val)
            else:
                val = ext_json.str_json(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    def args_raw(self, indent=False):
        return self._as_raw(self.task_args, indent)

    def kwargs_raw(self, indent=False):
        return self._as_raw(self.task_kwargs, indent)

    def __repr__(self):
        return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5408 5417
5409 5418
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with the task definition on every update
    target.task_uid = ScheduleEntry.get_uid(target)
5413 5422
5414 5423
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute task_uid from the task definition when a row is first created
    target.task_uid = ScheduleEntry.get_uid(target)
5418 5427
5419 5428
class _BaseBranchPerms(BaseModel):
    """
    Mixin shared by branch-permission models: stores a glob branch
    pattern plus a hash of it, and can match a branch name against it.
    """
    @classmethod
    def compute_hash(cls, value):
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # empty/None pattern means "match everything"
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error for globs that translate into invalid regexes
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if the given branch matches this permission entry.

        :param branch: branch name for the commit; falsy values match
        :return: True when the branch matches the stored glob pattern
        """

        branch = branch or ''

        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
5458 5467
5459 5468
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user branch permission rule attached to a user->repo permission.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', back_populates='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')

    # evaluation order of rules; pattern columns back the
    # _BaseBranchPerms.branch_pattern hybrid property
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __repr__(self):
        return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5483 5492
5484 5493
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user-group branch permission rule attached to a group->repo
    permission.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', back_populates='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')

    # evaluation order of rules; pattern columns back the
    # _BaseBranchPerms.branch_pattern hybrid property
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __repr__(self):
        return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5508 5517
5509 5518
class UserBookmark(Base, BaseModel):
    """
    A positioned bookmark of a user, pointing at a repository, a
    repository group, or a plain redirect URL. Position is unique
    per user.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        # position is unique per user, so at most one row can match
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """
        Return all bookmarks of a user ordered by position, with the
        repo/repo-group targets eagerly loaded; optionally served from
        the short SQL cache.
        """
        bookmarks = cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
            )

        return bookmarks.all()

    def __repr__(self):
        return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5557 5566
5558 5567
class FileStore(Base, BaseModel):
    """
    An uploaded artifact/attachment stored in the file store, identified
    by ``file_uid``, optionally scoped (with ACL checks) to a user, user
    group, repo or repo group.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    # the user who uploaded the artifact
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to a user the requester has access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')

    # scope limited to a user group the requester has access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to a repo the requester has access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to a repo group the requester has access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_scope(cls, scope_type, scope_id):
        """Serialize a (scope_type, scope_id) pair into a `type:id` string."""
        if scope_type == 'repo':
            return f'repo:{scope_id}'
        elif scope_type == 'repo-group':
            return f'repo-group:{scope_id}'
        elif scope_type == 'user':
            return f'user:{scope_id}'
        elif scope_type == 'user-group':
            return f'user-group:{scope_id}'
        else:
            # unknown scope types are returned as-is
            return scope_type

    @classmethod
    def get_by_store_uid(cls, file_store_uid, safe=False):
        """
        Fetch an entry by its file_uid; with safe=True, duplicates return
        the first row instead of raising (scalar raises on multiple rows).
        """
        if safe:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
        else:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        # builds an unsaved FileStore instance; caller is responsible for
        # adding it to the session and committing

        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to a file store row.

        :param file_store_id: primary key of the FileStore row (missing
            row is a silent no-op)
        :param args: iterable of (section, key, value, value_type) tuples
        :raises ArtifactMetadataDuplicate: if a section/key already exists
        :raises ArtifactMetadataBadValueType: for unknown value types
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # concurrent insert raced us past the has_key check above
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        # single UPDATE incrementing the counter and stamping access time
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        # dict representation used for API/JSON serialization
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return f'<FileStore({self.file_store_id})>'
5724 5733
5725 5734
class FileStoreMetadata(Base, BaseModel):
    """
    Typed section/key/value metadata attached to a FileStore entry;
    uniqueness is enforced on hashes of section and key.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # allowed value types mapped to the converter used for their values
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined', viewonly=True)
5768 5777
5769 5778 @classmethod
5770 5779 def valid_value_type(cls, value):
5771 5780 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5772 5781 raise ArtifactMetadataBadValueType(
5773 5782 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5774 5783
5775 5784 @hybrid_property
5776 5785 def file_store_meta_section(self):
5777 5786 return self._file_store_meta_section
5778 5787
5779 5788 @file_store_meta_section.setter
5780 5789 def file_store_meta_section(self, value):
5781 5790 self._file_store_meta_section = value
5782 5791 self._file_store_meta_section_hash = _hash_key(value)
5783 5792
5784 5793 @hybrid_property
5785 5794 def file_store_meta_key(self):
5786 5795 return self._file_store_meta_key
5787 5796
5788 5797 @file_store_meta_key.setter
5789 5798 def file_store_meta_key(self, value):
5790 5799 self._file_store_meta_key = value
5791 5800 self._file_store_meta_key_hash = _hash_key(value)
5792 5801
5793 5802 @hybrid_property
5794 5803 def file_store_meta_value(self):
5795 5804 val = self._file_store_meta_value
5796 5805
5797 5806 if self._file_store_meta_value_type:
5798 5807 # e.g unicode.encrypted == unicode
5799 5808 _type = self._file_store_meta_value_type.split('.')[0]
5800 5809 # decode the encrypted value if it's encrypted field type
5801 5810 if '.encrypted' in self._file_store_meta_value_type:
5802 5811 cipher = EncryptedTextValue()
5803 5812 val = safe_str(cipher.process_result_value(val, None))
5804 5813 # do final type conversion
5805 5814 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5806 5815 val = converter(val)
5807 5816
5808 5817 return val
5809 5818
5810 5819 @file_store_meta_value.setter
5811 5820 def file_store_meta_value(self, val):
5812 5821 val = safe_str(val)
5813 5822 # encode the encrypted value
5814 5823 if '.encrypted' in self.file_store_meta_value_type:
5815 5824 cipher = EncryptedTextValue()
5816 5825 val = safe_str(cipher.process_bind_param(val, None))
5817 5826 self._file_store_meta_value = val
5818 5827
5819 5828 @hybrid_property
5820 5829 def file_store_meta_value_type(self):
5821 5830 return self._file_store_meta_value_type
5822 5831
5823 5832 @file_store_meta_value_type.setter
5824 5833 def file_store_meta_value_type(self, val):
5825 5834 # e.g unicode.encrypted
5826 5835 self.valid_value_type(val)
5827 5836 self._file_store_meta_value_type = val
5828 5837
5829 5838 def __json__(self):
5830 5839 data = {
5831 5840 'artifact': self.file_store.file_uid,
5832 5841 'section': self.file_store_meta_section,
5833 5842 'key': self.file_store_meta_key,
5834 5843 'value': self.file_store_meta_value,
5835 5844 }
5836 5845
5837 5846 return data
5838 5847
5839 5848 def __repr__(self):
5840 5849 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
5841 5850 self.file_store_meta_key, self.file_store_meta_value)
5842 5851
5843 5852
class DbMigrateVersion(Base, BaseModel):
    """Tracks the database schema migration version per repository path."""

    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        ver = cls.query().first()
        ver.version = version
        Session().commit()
5862 5871
5863 5872
class DbSession(Base, BaseModel):
    # Database-backed session storage: one row per session ``namespace``,
    # with a pickled payload in ``data`` and access/creation timestamps.
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return f'<DB:DbSession({self.id})>'

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
@@ -1,1063 +1,1062 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Scm model for RhodeCode
21 21 """
22 22
23 23 import os.path
24 24 import traceback
25 25 import logging
26 26 import io
27 27
28 28 from sqlalchemy import func
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 import rhodecode
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.lib.vcs import get_backend
34 34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 35 from rhodecode.lib.vcs.nodes import FileNode
36 36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 37 from rhodecode.lib import helpers as h, rc_cache
38 38 from rhodecode.lib.auth import (
39 39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 40 HasUserGroupPermissionAny)
41 41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 42 from rhodecode.lib import hooks_utils
43 43 from rhodecode.lib.utils import (
44 44 get_filesystem_repos, make_db_config)
45 45 from rhodecode.lib.str_utils import safe_str
46 46 from rhodecode.lib.system_info import get_system_info
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (
49 49 or_, false, null,
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest, FileStore)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
class UserTemp(object):
    """Minimal holder that carries only a user id, with a log-friendly repr."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(type(self).__name__, self.user_id)
64 64
65 65
class RepoTemp(object):
    """Minimal holder that carries only a repository id, with a log-friendly repr."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(type(self).__name__, self.repo_id)
72 72
73 73
class SimpleCachedRepoList(object):
    """
    Lightweight iteration over repositories without SCM backend
    initialisation; yields permission-filtered dicts built from DB rows.
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission check is performed per repository at iteration time
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {}
            }
109 109
110 110
class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Iterator over db objects yielding only entries that pass a
        permission check.

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        # bind the permission set once; the checker is then called per object
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(type(self).__name__, len(self))

    def __iter__(self):
        context_name = self.__class__.__name__
        for db_obj in self.obj_list:
            # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
            name = db_obj.__dict__.get(self.obj_attr, None)
            if self.perm_checker(name, context_name, **self.extra_kwargs):
                yield db_obj
146 146
147 147
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super().__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
159 159
160 160
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super().__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
172 172
173 173
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super().__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
185 185
186 186
187 187 class ScmModel(BaseModel):
188 188 """
189 189 Generic Scm Model
190 190 """
191 191
    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database settings.
        Cached on first access via LazyProperty.
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
200 200
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects
        keyed by normalized repository name.

        :param repos_path: path to directory containing repositories;
            defaults to the configured repositories root
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    # path is a (backend_alias, repo_path) tuple
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                # unreadable or vanished path — skip it silently
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos
239 239
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and wrap them in a lazily iterated,
        permission-filtered SimpleCachedRepoList.

        :param all_repos: list of repository db objects;
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            # default: all top-level (ungrouped) repos, case-insensitive sort
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == null())\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
257 257
258 258 @staticmethod
259 259 def get_parent_commits(parent_commit, scm_instance):
260 260 if not parent_commit:
261 261 parent_commit = EmptyCommit(alias=scm_instance.alias)
262 262
263 263 if isinstance(parent_commit, EmptyCommit):
264 264 # EmptyCommit means we're editing empty repository
265 265 parents = None
266 266 else:
267 267 parents = [parent_commit]
268 268 return parent_commit, parents
269 269
    def initialize_inmemory_vars(self, user, repo, message, author):
        """
        Initialize node specific objects for further usage.

        :returns: tuple of (user, scm_instance, message, commiter, author, imc)
            where imc is the backend's in-memory commit object
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)
        message = safe_str(message)
        commiter = user.full_contact
        # fall back to the commiter when no explicit author was given
        author = safe_str(author) if author else commiter
        imc = scm_instance.in_memory_commit

        return user, scm_instance, message, commiter, author, imc
282 282
283 283 def get_repo_groups(self, all_groups=None):
284 284 if all_groups is None:
285 285 all_groups = RepoGroup.query()\
286 286 .filter(RepoGroup.group_parent_id == null()).all()
287 287 return [x for x in RepoGroupList(all_groups)]
288 288
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            # flip (or remove) the per-repo invalidation namespace key
            repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
            CacheKey.set_invalidate(repo_namespace_key, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the cached commit info so callers see fresh state
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
313 312
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the following state: delete the UserFollowing row if the user
        already follows the repository, otherwise create one.
        Changes are added to the session; caller is responsible for commit.
        """

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
336 335
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle the following state: delete the UserFollowing row if the user
        already follows the target user, otherwise create one.
        Changes are added to the session; caller is responsible for commit.
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
358 357
359 358 def is_following_repo(self, repo_name, user_id, cache=False):
360 359 r = self.sa.query(Repository)\
361 360 .filter(Repository.repo_name == repo_name).scalar()
362 361
363 362 f = self.sa.query(UserFollowing)\
364 363 .filter(UserFollowing.follows_repository == r)\
365 364 .filter(UserFollowing.user_id == user_id).scalar()
366 365
367 366 return f is not None
368 367
369 368 def is_following_user(self, username, user_id, cache=False):
370 369 u = User.get_by_username(username)
371 370
372 371 f = self.sa.query(UserFollowing)\
373 372 .filter(UserFollowing.follows_user == u)\
374 373 .filter(UserFollowing.user_id == user_id).scalar()
375 374
376 375 return f is not None
377 376
378 377 def get_followers(self, repo):
379 378 repo = self._get_repo(repo)
380 379
381 380 return self.sa.query(UserFollowing)\
382 381 .filter(UserFollowing.follows_repository == repo).count()
383 382
384 383 def get_forks(self, repo):
385 384 repo = self._get_repo(repo)
386 385 return self.sa.query(Repository)\
387 386 .filter(Repository.fork == repo).count()
388 387
389 388 def get_pull_requests(self, repo):
390 389 repo = self._get_repo(repo)
391 390 return self.sa.query(PullRequest)\
392 391 .filter(PullRequest.target_repo == repo)\
393 392 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
394 393
395 394 def get_artifacts(self, repo):
396 395 repo = self._get_repo(repo)
397 396 return self.sa.query(FileStore)\
398 397 .filter(FileStore.repo == repo)\
399 398 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
400 399
    def mark_as_fork(self, repo, fork, user):
        """
        Set `fork` as the fork parent of `repo`; pass a falsy `fork` to clear
        the relationship. Validates against self-forks and cross-type forks.
        """
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo
414 413
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
        """
        Fetch changes from a remote into the repository and invalidate its
        caches on success.

        :param remote_uri: override for the repo's stored clone uri
        :param validate_uri: validate the remote url before fetching
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # don't trigger local hooks for a remote sync operation
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri, **kwargs)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
443 442
    def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
        """
        Push local changes of the repository to a remote.

        :param remote_uri: override for the repo's stored push uri
        :param validate_uri: validate the remote url before pushing
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # don't trigger local hooks for a remote sync operation
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri, **kwargs)
        except Exception:
            log.error(traceback.format_exc())
            raise
468 467
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes, branch: str = None):
        """
        Commit a change to a single file on top of `commit` and trigger the
        post-push hook.

        :param commit: parent commit the change is based on
        :param content: new file content as bytes
        :param f_path: file path as bytes
        :param branch: target branch; defaults to the parent commit's branch
        :returns: the newly created tip commit
        :raises IMCCommitError: when the backend commit fails
        """
        user = self._get_user(user)

        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=branch or commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip
499 498
500 499 def _sanitize_path(self, f_path: bytes):
501 500 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
502 501 raise NonRelativePathError(b'%b is not an relative path' % f_path)
503 502 if f_path:
504 503 f_path = os.path.normpath(f_path)
505 504 return f_path
506 505
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Build display metadata (size, last-commit info, author gravatar) for
        every file directly inside `dir_node`. Returns [] for non-directories.
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a plain file (e.g. sub-dirs)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
533 532
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files)
        """
        _files = list()
        _dirs = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            # get RootNode, inject pre-load options before walking
            top_node = commit.get_node(root_path)
            extended_info_pre_load = []
            if extended_info:
                extended_info_pre_load += ['md5']
            top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    _content = None
                    # in flat mode each entry is just the path string
                    _data = f_name = f.str_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # skip content for binary files and oversized files
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = f.str_content

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.str_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            # directories carry empty placeholder metadata
                            _data.update({
                                "md5": "",
                                "binary": False,
                                "size": 0,
                                "extension": "",
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files
621 620
    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view.
        Returns a tuple of (dirs, files); each entry is a small dict with
        only escaped name and type — no metadata is pre-loaded.
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            top_node = commit.get_node(root_path)
            # disable pre-loading — only the paths are needed here
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):
                for f in files:

                    _data = {
                        "name": h.escape(f.str_path),
                        "type": "file",
                    }

                    _files.append(_data)

                for d in dirs:

                    _data = {
                        "name": h.escape(d.str_path),
                        "type": "dir",
                    }

                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files
660 659
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        Retrieve a single file node from a commit as a metadata dict.

        :param extended_info: include extension/mimetype
        :param content: include file content (subject to max_file_bytes)
        :param max_file_bytes: content is omitted for files above this size
        :param cache: use cached node attributes vs metadata_uncached()
        :raises RepositoryError: when the path points to a directory
        """

        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # one uncached call yields all metadata plus raw content
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
739 738
740 739 def get_fts_data(self, repo_name, commit_id, root_path='/'):
741 740 """
742 741 Fetch node tree for usage in full text search
743 742 """
744 743
745 744 tree_info = list()
746 745
747 746 try:
748 747 _repo = self._get_repo(repo_name)
749 748 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
750 749 root_path = root_path.lstrip('/')
751 750 top_node = commit.get_node(root_path)
752 751 top_node.default_pre_load = []
753 752
754 753 for __, dirs, files in commit.walk(top_node):
755 754
756 755 for f in files:
757 756 is_binary, md5, size, _content = f.metadata_uncached()
758 757 _data = {
759 758 "name": f.str_path,
760 759 "md5": md5,
761 760 "extension": f.extension,
762 761 "binary": is_binary,
763 762 "size": size
764 763 }
765 764
766 765 tree_info.append(_data)
767 766
768 767 except RepositoryError:
769 768 log.exception("Exception in get_nodes")
770 769 raise
771 770
772 771 return tree_info
773 772
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}; filename keys
            must be bytes, content may be bytes or an open binary file object
        :param parent_commit: parent commit, can be empty then it's
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """
        user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
            user, repo, message, author)

        parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        # first pass: validate every entry before touching the in-memory commit,
        # so a bad node aborts the whole operation up front
        # NOTE(review): the message mentions upload_file_types but the check is
        # bytes-only for the *filename*; content types are checked just below —
        # the two error strings look swapped/incomplete, confirm intent
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError('content key value in nodes needs to be bytes')

        # second pass: normalize paths and materialize file-like contents
        # NOTE(review): nodes is re-indexed with the *sanitized* path; if
        # _sanitize_path rewrites the key this raises KeyError — confirm
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']

            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit

            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # drop cached repo state so readers see the new tip
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
838 837
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commit a batch of add/delete/modify operations against `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': content, 'op': 'add'|'del'|'mod', 'mode': optional}}
        :param parent_commit: parent commit; if empty this is the initial commit
        :param author: commit author, may differ from committer (git only)
        :param trigger_push_hook: whether to fire post-push hooks on success
        :returns: the newly created tip commit
        :raises IMCCommitError: wraps any unexpected commit failure
        """
        user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
            user, repo, message, author)

        parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            # NOTE(review): 'add' uses the OLD filename node, not data['filename'];
            # confirm callers always pass identical names for the add op
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    # rename is emulated as remove(old) + add(new)
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # no-op changes are a caller error, surface them untouched
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
890 889
891 890 def update_binary_node(self, user, repo, message, node, parent_commit=None, author=None):
892 891 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
893 892 user, repo, message, author)
894 893
895 894 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
896 895
897 896 file_path = node.get('file_path')
898 897 if isinstance(raw_content := node.get('content'), (io.BytesIO, io.BufferedRandom)):
899 898 content = raw_content.read()
900 899 else:
901 900 raise Exception("Wrong content was provided")
902 901 file_node = FileNode(file_path, content=content)
903 902 imc.change(file_node)
904 903
905 904 try:
906 905 tip = imc.commit(message=message,
907 906 author=author,
908 907 parents=parents,
909 908 branch=parent_commit.branch)
910 909 except NodeNotChangedError:
911 910 raise
912 911 except Exception as e:
913 912 log.exception("Unexpected exception during call to imc.commit")
914 913 raise IMCCommitError(str(e))
915 914 finally:
916 915 self.mark_for_invalidation(repo.repo_name)
917 916
918 917 hooks_utils.trigger_post_push_hook(
919 918 username=user.username, action='push_local', hook_type='post_push',
920 919 repo_name=repo.repo_name, repo_type=scm_instance.alias,
921 920 commit_ids=[tip.raw_id])
922 921 return tip
923 922
924 923 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
925 924 author=None, trigger_push_hook=True):
926 925 """
927 926 Deletes given multiple nodes into `repo`
928 927
929 928 :param user: RhodeCode User object or user_id, the committer
930 929 :param repo: RhodeCode Repository object
931 930 :param message: commit message
932 931 :param nodes: mapping {filename:{'content':content},...}
933 932 :param parent_commit: parent commit, can be empty than it's initial
934 933 commit
935 934 :param author: author of commit, cna be different that commiter only
936 935 for git
937 936 :param trigger_push_hook: trigger push hooks
938 937
939 938 :returns: new commit after deletion
940 939 """
941 940
942 941 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
943 942 user, repo, message, author)
944 943
945 944 processed_nodes = []
946 945 for f_path in nodes:
947 946 f_path = self._sanitize_path(f_path)
948 947 # content can be empty but for compatibility it allows same dicts
949 948 # structure as add_nodes
950 949 content = nodes[f_path].get('content')
951 950 processed_nodes.append((safe_bytes(f_path), content))
952 951
953 952 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
954 953
955 954 # add multiple nodes
956 955 for path, content in processed_nodes:
957 956 imc.remove(FileNode(path, content=content))
958 957
959 958 # TODO: handle pre push scenario
960 959 tip = imc.commit(message=message,
961 960 author=author,
962 961 parents=parents,
963 962 branch=parent_commit.branch)
964 963
965 964 self.mark_for_invalidation(repo.repo_name)
966 965 if trigger_push_hook:
967 966 hooks_utils.trigger_post_push_hook(
968 967 username=user.username, action='push_local', hook_type='post_push',
969 968 repo_name=repo.repo_name, repo_type=scm_instance.alias,
970 969 commit_ids=[tip.raw_id])
971 970 return tip
972 971
973 972 def strip(self, repo, commit_id, branch):
974 973 scm_instance = repo.scm_instance(cache=False)
975 974 scm_instance.config.clear_section('hooks')
976 975 scm_instance.strip(commit_id, branch)
977 976 self.mark_for_invalidation(repo.repo_name)
978 977
979 978 def get_unread_journal(self):
980 979 return self.sa.query(UserLog).count()
981 980
982 981 @classmethod
983 982 def backend_landing_ref(cls, repo_type):
984 983 """
985 984 Return a default landing ref based on a repository type.
986 985 """
987 986
988 987 landing_ref = {
989 988 'hg': ('branch:default', 'default'),
990 989 'git': ('branch:master', 'master'),
991 990 'svn': ('rev:tip', 'latest tip'),
992 991 'default': ('rev:tip', 'latest tip'),
993 992 }
994 993
995 994 return landing_ref.get(repo_type) or landing_ref['default']
996 995
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (for hg only)
        grouped by type, for picking a repository's landing revision.

        :param translator: i18n translation callable (bound to ``_`` below)
        :param repo: repository (object/name/id) or None for a new repo form
        :returns: tuple (choices, ref_options) — flat valid values plus
            grouped (value, label) options for the select widget
        """
        # NOTE(review): GitRepository appears unused in this method — confirm
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        # the default ref always comes first, ahead of the grouped options
        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or without maybe a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options
1060 1059
1061 1060 def get_server_info(self, environ=None):
1062 1061 server_info = get_system_info(environ)
1063 1062 return server_info
@@ -1,744 +1,748 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
3 2 #
4 3 # This program is free software: you can redistribute it and/or modify
5 4 # it under the terms of the GNU Affero General Public License, version 3
6 5 # (only), as published by the Free Software Foundation.
7 6 #
8 7 # This program is distributed in the hope that it will be useful,
9 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 10 # GNU General Public License for more details.
12 11 #
13 12 # You should have received a copy of the GNU Affero General Public License
14 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 14 #
16 15 # This program is dual-licensed. If you wish to learn more about the
17 16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 18
20 19
21 20 """
22 21 Package for testing various lib/helper functions in rhodecode
23 22 """
24 23
25 24 import datetime
26 25 import string
27 26 import mock
28 27 import pytest
29 28 import functools
29 import time
30 30
31 31 from rhodecode.tests import no_newline_id_generator
32 32 from rhodecode.tests.utils import run_test_concurrently
33 33
34 34 from rhodecode.lib import rc_cache
35 35 from rhodecode.lib.helpers import InitialsGravatar
36 36 from rhodecode.lib.utils2 import AttributeDict
37 37
38 38 from rhodecode.model.db import Repository, CacheKey
39 39
40 40
# (url with optional embedded credentials, expected credential-stripped url)
TEST_URLS = [
    ('127.0.0.1', '127.0.0.1'),
    ('marcink@127.0.0.1', '127.0.0.1'),
    ('marcink:pass@127.0.0.1', '127.0.0.1'),
    ('marcink@domain.name:pass@127.0.0.1', '127.0.0.1'),

    # host:port variants — the port must survive filtering
    ('127.0.0.1:8080', '127.0.0.1:8080'),
    ('marcink@127.0.0.1:8080', '127.0.0.1:8080'),
    ('marcink:pass@127.0.0.1:8080', '127.0.0.1:8080'),
    ('marcink@domain.name:pass@127.0.0.1:8080', '127.0.0.1:8080'),

    ('domain.org', 'domain.org'),
    ('user:pass@domain.org:8080', 'domain.org:8080'),
    ('user@domain.org:pass@domain.org:8080', 'domain.org:8080'),
]
56 56
57 57
58 58 @pytest.mark.parametrize("protocol", ['http://', 'https://'])
59 59 @pytest.mark.parametrize("test_url, expected", TEST_URLS)
def test_credentials_filter(protocol, test_url, expected):
    """Credentials embedded in a URL are stripped; scheme, host and port survive."""
    from rhodecode.lib.utils2 import credentials_filter
    full_url = protocol + test_url
    filtered = credentials_filter(full_url)
    assert filtered == protocol + expected
64 64
65 65
66 66 @pytest.mark.parametrize("str_bool, expected", [
67 67 ('t', True),
68 68 ('true', True),
69 69 ('y', True),
70 70 ('yes', True),
71 71 ('on', True),
72 72 ('1', True),
73 73 ('Y', True),
74 74 ('yeS', True),
75 75 ('Y', True),
76 76 ('TRUE', True),
77 77 ('T', True),
78 78 ('False', False),
79 79 ('F', False),
80 80 ('FALSE', False),
81 81 ('0', False),
82 82 ('-1', False),
83 83 ('', False)
84 84 ])
def test_str2bool(str_bool, expected):
    """str2bool maps truthy/falsy string spellings (any case) to booleans."""
    from rhodecode.lib.utils2 import str2bool
    result = str2bool(str_bool)
    assert result == expected
88 88
89 89
90 90 @pytest.mark.parametrize("text, expected", functools.reduce(lambda a1, a2: a1+a2, [
91 91 [
92 92 (pref+"", []),
93 93 (pref+"Hi there @marcink", ['marcink']),
94 94 (pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
95 95 (pref+"Hi there @marcink\n", ['marcink']),
96 96 (pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
97 97 (pref+"Hi there marcin@rhodecode.com", []),
98 98 (pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
99 99 (pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
100 100 (pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
101 101 (pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
102 102 (pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
103 103 (pref+"@john @mary, please review", ["john", "mary"]),
104 104 (pref+"@john,@mary, please review", ["john", "mary"]),
105 105 (pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
106 106 (pref+"@first hi there @marcink here's my email marcin@email.com "
107 107 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
108 108 (pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
109 109 (pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
110 110 (pref+"user.dot hej ! not-needed maril@domain.org", []),
111 111 (pref+"\n@marcin", ['marcin']),
112 112 ]
113 113 for pref in ['', '\n', 'hi !', '\t', '\n\n']]), ids=no_newline_id_generator)
def test_mention_extractor(text, expected):
    """extract_mentioned_users yields the expected users, case-insensitively sorted."""
    from rhodecode.lib.utils2 import extract_mentioned_users
    mentioned = extract_mentioned_users(text)
    # result must already be sorted ignoring case
    assert mentioned == sorted(mentioned, key=str.lower)
    assert set(mentioned) == set(expected)
119 119
120 120 @pytest.mark.parametrize("age_args, expected, kw", [
121 121 ({}, u'just now', {}),
122 122 ({'seconds': -1}, u'1 second ago', {}),
123 123 ({'seconds': -60 * 2}, u'2 minutes ago', {}),
124 124 ({'hours': -1}, u'1 hour ago', {}),
125 125 ({'hours': -24}, u'1 day ago', {}),
126 126 ({'hours': -24 * 5}, u'5 days ago', {}),
127 127 ({'months': -1}, u'1 month ago', {}),
128 128 ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
129 129 ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
130 130 ({}, u'just now', {'short_format': True}),
131 131 ({'seconds': -1}, u'1sec ago', {'short_format': True}),
132 132 ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
133 133 ({'hours': -1}, u'1h ago', {'short_format': True}),
134 134 ({'hours': -24}, u'1d ago', {'short_format': True}),
135 135 ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
136 136 ({'months': -1}, u'1m ago', {'short_format': True}),
137 137 ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
138 138 ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
139 139 ])
def test_age(age_args, expected, kw, baseapp):
    """Past timestamps render as the expected human-readable age string."""
    from rhodecode.lib.utils2 import age
    from dateutil import relativedelta

    now = datetime.datetime(year=2012, month=5, day=17)
    moment = now + relativedelta.relativedelta(**age_args)
    rendered = age(moment, now=now, **kw)
    # age() returns a lazy translation string; interpolate to plain text
    assert rendered.interpolate() == expected
151 151
152 152
153 153 @pytest.mark.parametrize("age_args, expected, kw", [
154 154 ({}, u'just now', {}),
155 155 ({'seconds': 1}, u'in 1 second', {}),
156 156 ({'seconds': 60 * 2}, u'in 2 minutes', {}),
157 157 ({'hours': 1}, u'in 1 hour', {}),
158 158 ({'hours': 24}, u'in 1 day', {}),
159 159 ({'hours': 24 * 5}, u'in 5 days', {}),
160 160 ({'months': 1}, u'in 1 month', {}),
161 161 ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
162 162 ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
163 163 ({}, u'just now', {'short_format': True}),
164 164 ({'seconds': 1}, u'in 1sec', {'short_format': True}),
165 165 ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
166 166 ({'hours': 1}, u'in 1h', {'short_format': True}),
167 167 ({'hours': 24}, u'in 1d', {'short_format': True}),
168 168 ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
169 169 ({'months': 1}, u'in 1m', {'short_format': True}),
170 170 ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
171 171 ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
172 172 ])
def test_age_in_future(age_args, expected, kw, baseapp):
    """Future timestamps render as the expected 'in ...' age string."""
    from rhodecode.lib.utils2 import age
    from dateutil import relativedelta

    now = datetime.datetime(year=2012, month=5, day=17)
    moment = now + relativedelta.relativedelta(**age_args)
    rendered = age(moment, now=now, **kw)
    # age() returns a lazy translation string; interpolate to plain text
    assert rendered.interpolate() == expected
184 184
185 185
186 186 @pytest.mark.parametrize("sample, expected_tags", [
187 187 # entry
188 188 ((
189 189 ""
190 190 ),
191 191 [
192 192
193 193 ]),
194 194 # entry
195 195 ((
196 196 "hello world [stale]"
197 197 ),
198 198 [
199 199 ('state', '[stale]'),
200 200 ]),
201 201 # entry
202 202 ((
203 203 "hello world [v2.0.0] [v1.0.0]"
204 204 ),
205 205 [
206 206 ('generic', '[v2.0.0]'),
207 207 ('generic', '[v1.0.0]'),
208 208 ]),
209 209 # entry
210 210 ((
211 211 "he[ll]o wo[rl]d"
212 212 ),
213 213 [
214 214 ('label', '[ll]'),
215 215 ('label', '[rl]'),
216 216 ]),
217 217 # entry
218 218 ((
219 219 "hello world [stale]\n[featured]\n[stale] [dead] [dev]"
220 220 ),
221 221 [
222 222 ('state', '[stale]'),
223 223 ('state', '[featured]'),
224 224 ('state', '[stale]'),
225 225 ('state', '[dead]'),
226 226 ('state', '[dev]'),
227 227 ]),
228 228 # entry
229 229 ((
230 230 "hello world \n\n [stale] \n [url =&gt; [name](http://rc.com)]"
231 231 ),
232 232 [
233 233 ('state', '[stale]'),
234 234 ('url', '[url =&gt; [name](http://rc.com)]'),
235 235 ]),
236 236 # entry
237 237 ((
238 238 "[url =&gt; [linkNameJS](javascript:alert(document.domain))]\n"
239 239 "[url =&gt; [linkNameHTTP](http://rhodecode.com)]\n"
240 240 "[url =&gt; [linkNameHTTPS](https://rhodecode.com)]\n"
241 241 "[url =&gt; [linkNamePath](/repo_group)]\n"
242 242 ),
243 243 [
244 244 ('generic', '[linkNameJS]'),
245 245 ('url', '[url =&gt; [linkNameHTTP](http://rhodecode.com)]'),
246 246 ('url', '[url =&gt; [linkNameHTTPS](https://rhodecode.com)]'),
247 247 ('url', '[url =&gt; [linkNamePath](/repo_group)]'),
248 248 ]),
249 249 # entry
250 250 ((
251 251 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =&gt;>< sa]"
252 252 "[requires] [stale] [see<>=&gt;] [see =&gt; http://url.com]"
253 253 "[requires =&gt; url] [lang =&gt; python] [just a tag] "
254 254 "<html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
255 255 "[,d] [ =&gt; ULR ] [obsolete] [desc]]"
256 256 ),
257 257 [
258 258 ('label', '[desc]'),
259 259 ('label', '[obsolete]'),
260 260 ('label', '[or]'),
261 261 ('label', '[requires]'),
262 262 ('label', '[tag]'),
263 263 ('state', '[stale]'),
264 264 ('lang', '[lang =&gt; python]'),
265 265 ('ref', '[requires =&gt; url]'),
266 266 ('see', '[see =&gt; http://url.com]'),
267 267
268 268 ]),
269 269
270 270 ], ids=no_newline_id_generator)
def test_metatag_extraction(sample, expected_tags):
    """extract_metatags finds every recognized metatag in free-form text."""
    from rhodecode.lib.helpers import extract_metatags
    found_tags, _remaining_text = extract_metatags(sample)
    assert sorted(found_tags) == sorted(expected_tags)
275 275
276 276
277 277 @pytest.mark.parametrize("tag_data, expected_html", [
278 278
279 279 (('state', '[stable]'), '<div class="metatag" tag="state stable">stable</div>'),
280 280 (('state', '[stale]'), '<div class="metatag" tag="state stale">stale</div>'),
281 281 (('state', '[featured]'), '<div class="metatag" tag="state featured">featured</div>'),
282 282 (('state', '[dev]'), '<div class="metatag" tag="state dev">dev</div>'),
283 283 (('state', '[dead]'), '<div class="metatag" tag="state dead">dead</div>'),
284 284
285 285 (('label', '[personal]'), '<div class="metatag" tag="label">personal</div>'),
286 286 (('generic', '[v2.0.0]'), '<div class="metatag" tag="generic">v2.0.0</div>'),
287 287
288 288 (('lang', '[lang =&gt; JavaScript]'), '<div class="metatag" tag="lang">JavaScript</div>'),
289 289 (('lang', '[lang =&gt; C++]'), '<div class="metatag" tag="lang">C++</div>'),
290 290 (('lang', '[lang =&gt; C#]'), '<div class="metatag" tag="lang">C#</div>'),
291 291 (('lang', '[lang =&gt; Delphi/Object]'), '<div class="metatag" tag="lang">Delphi/Object</div>'),
292 292 (('lang', '[lang =&gt; Objective-C]'), '<div class="metatag" tag="lang">Objective-C</div>'),
293 293 (('lang', '[lang =&gt; .NET]'), '<div class="metatag" tag="lang">.NET</div>'),
294 294
295 295 (('license', '[license =&gt; BSD 3-clause]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/BSD 3-clause">BSD 3-clause</a></div>'),
296 296 (('license', '[license =&gt; GPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/GPLv3">GPLv3</a></div>'),
297 297 (('license', '[license =&gt; MIT]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/MIT">MIT</a></div>'),
298 298 (('license', '[license =&gt; AGPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/AGPLv3">AGPLv3</a></div>'),
299 299
300 300 (('ref', '[requires =&gt; RepoName]'), '<div class="metatag" tag="ref requires">requires: <a href="/RepoName">RepoName</a></div>'),
301 301 (('ref', '[recommends =&gt; GroupName]'), '<div class="metatag" tag="ref recommends">recommends: <a href="/GroupName">GroupName</a></div>'),
302 302 (('ref', '[conflicts =&gt; SomeName]'), '<div class="metatag" tag="ref conflicts">conflicts: <a href="/SomeName">SomeName</a></div>'),
303 303 (('ref', '[base =&gt; SomeName]'), '<div class="metatag" tag="ref base">base: <a href="/SomeName">SomeName</a></div>'),
304 304
305 305 (('see', '[see =&gt; http://rhodecode.com]'), '<div class="metatag" tag="see">see: http://rhodecode.com </div>'),
306 306
307 307 (('url', '[url =&gt; [linkName](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">linkName</a> </div>'),
308 308 (('url', '[url =&gt; [example link](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">example link</a> </div>'),
309 309 (('url', '[url =&gt; [v1.0.0](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">v1.0.0</a> </div>'),
310 310
311 311 ])
def test_metatags_stylize(tag_data, expected_html):
    """style_metatag renders a (type, value) metatag into its HTML widget."""
    from rhodecode.lib.helpers import style_metatag
    tag_type, tag_value = tag_data
    rendered = style_metatag(tag_type, tag_value)
    assert rendered == expected_html
316 316
317 317
318 318 @pytest.mark.parametrize("tmpl_url, email, expected", [
319 319 ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),
320 320
321 321 ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
322 322 ('http://test.com/{md5email}', 'testΔ…Δ‡@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),
323 323
324 324 ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
325 325 ('http://testX.com/{md5email}?s={size}', 'testΔ…Δ‡@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),
326 326
327 327 ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
328 328 ('{scheme}://{netloc}/{md5email}/{size}', 'testΔ…Δ‡@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),
329 329
330 330 ('http://test.com/{email}', 'testΔ…Δ‡@foo.com', 'http://test.com/testΔ…Δ‡@foo.com'),
331 331 ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
332 332 ('http://test.com/{email}?size={size}', 'testΔ…Δ‡@foo.com', 'http://test.com/testΔ…Δ‡@foo.com?size=24'),
333 333 ])
def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
    """gravatar_url expands template placeholders ({email}, {md5email}, {size}, ...)."""
    from rhodecode.lib.helpers import gravatar_url

    def _stub_get_current_request():
        # emulate pyramid's get_current_request with a template context attached
        request_stub.scheme = 'https'
        request_stub.host = 'server.com'

        ctx = AttributeDict()
        ctx.visual = AttributeDict()
        ctx.visual.use_gravatar = True
        ctx.visual.gravatar_url = tmpl_url
        request_stub._call_context = ctx
        return request_stub

    with mock.patch('rhodecode.lib.helpers.get_current_request',
                    _stub_get_current_request):
        assert gravatar_url(email_address=email, size=24) == expected
357 357
358 358
359 359 @pytest.mark.parametrize(
360 360 "email, first_name, last_name, expected_initials, expected_color", [
361 361
362 362 ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
363 363 ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
364 364 # special cases of email
365 365 ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
366 366 ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
367 367 ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),
368 368
369 369 ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
370 370 ('pclouds@rhodecode.com', 'Nguyα»…n ThΓ‘i', 'Tgọc Duy', 'ND', '#665200'),
371 371
372 372 ('john-brown@foo.com', '', '', 'JF', '#73006b'),
373 373 ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
374 374 # partials
375 375 ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'), # fn+email
376 376 ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'), # em+ln
377 377 # non-ascii
378 378 ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
379 379 ('admin@rhodecode.com', 'Łukasz', 'Śuzminski', 'LS', '#104036'),
380 380 ('admin@rhodecode.com', 'Fabian', 'Łukaszewski', 'FL', '#104036'),
381 381
382 382 ('marcin.Ε›uzminski@rhodecode.com', '', '', 'MS', '#73000f'),
383 383
384 384 # special cases, LDAP can provide those...
385 385 ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
386 386 ('marcin.Ε›uzminski', '', '', 'MS', '#402020'),
387 387 ('null', '', '', 'NL', '#8c4646'),
388 388 ('some.@abc.com', 'some', '', 'SA', '#664e33')
389 389 ])
def test_initials_gravatar_pick_of_initials_and_color_algo(
        email, first_name, last_name, expected_initials, expected_color):
    """InitialsGravatar derives stable initials and a color from identity data."""
    gravatar = InitialsGravatar(email, first_name, last_name)
    assert gravatar.get_initials() == expected_initials
    assert gravatar.str2color(email) == expected_color
395 395
396 396
def test_initials_gravatar_mapping_algo():
    """Color-bank index picking reaches all 256 slots across many synthetic emails."""
    instance = InitialsGravatar('', '', '')

    emails = [
        f'{a}{b}{c}@rhodecode.com'
        for a in string.ascii_letters
        for b in string.ascii_letters[::-1][:10]
        for c in string.ascii_letters[:10]
    ]

    max_variations = 4096
    sampled = emails[:max_variations]
    picked_positions = {
        instance.pick_color_bank_index(email, instance.get_color_bank())
        for email in sampled
    }

    # we assume that we have matched all 256 possible positions,
    # in a reasonable amount of different email addresses
    assert len(picked_positions) == 256
    assert len(sampled) == max_variations
420 420
421 421
422 422 @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [
423 423 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
424 424 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'),
425 425 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'),
426 426 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'),
427 427 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'),
428 428 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'),
429 429 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'),
430 430 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
431 431 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
432 432 ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
433 433 ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'),
434 434 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'),
435 435 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
436 436 ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'),
437 437 ])
def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected):
    """get_clone_url fills the clone-URI template from request and repo data."""
    from rhodecode.lib.utils2 import get_clone_url

    class StubRequest(object):
        # minimal pyramid-request surface used by get_clone_url
        def request_url(self, name):
            return 'http://vps1:8000' + prefix

        def route_url(self, name):
            return self.request_url(name)

    generated = get_clone_url(
        request=StubRequest(),
        uri_tmpl=tmpl,
        repo_name=repo_name, repo_id=23, repo_type='hg', **overrides)
    assert generated == expected
453 453
454 454
def test_clone_url_svn_ssh_generator():
    """The SSH clone-URI template renders svn+ssh URLs with the system user."""
    from rhodecode.lib.utils2 import get_clone_url

    class StubRequest(object):
        # minimal pyramid-request surface used by get_clone_url
        def request_url(self, name):
            return 'http://vps1:8000'

        def route_url(self, name):
            return self.request_url(name)

    generated = get_clone_url(
        request=StubRequest(),
        uri_tmpl=Repository.DEFAULT_CLONE_URI_SSH,
        repo_name='svn-test', repo_id=23, repo_type='svn',
        sys_user='rcdev')
    assert generated == 'svn+ssh://rcdev@vps1/svn-test'
470 470
471 471
472 472 idx = 0
473 473
474 474
475 475 def _quick_url(text, tmpl="""<a class="tooltip-hovercard revision-link" href="%s" data-hovercard-alt="Commit: %s" data-hovercard-url="/some-url">%s</a>""", url_=None, commits=''):
476 476 """
477 477 Changes `some text url[foo]` => `some text <a href="/">foo</a>
478 478
479 479 :param text:
480 480 """
481 481 import re
482 482 # quickly change expected url[] into a link
483 483 url_pat = re.compile(r'(?:url\[)(.+?)(?:\])')
484 484 commits = commits or []
485 485
486 486 global idx
487 487 idx = 0
488 488
489 489 def url_func(match_obj):
490 490 global idx
491 491 _url = match_obj.groups()[0]
492 492 if commits:
493 493 commit = commits[idx]
494 494 idx += 1
495 495 return tmpl % (url_ or '/some-url', _url, commit)
496 496 else:
497 497 return tmpl % (url_ or '/some-url', _url)
498 498
499 499 return url_pat.sub(url_func, text)
500 500
501 501
502 502 @pytest.mark.parametrize("sample, expected, commits", [
503 503 (
504 504 "",
505 505 "",
506 506 [""]
507 507 ),
508 508 (
509 509 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
510 510 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
511 511 [""]
512 512 ),
513 513 (
514 514 "from rev 000000000000",
515 515 "from rev url[000000000000]",
516 516 ["000000000000"]
517 517 ),
518 518
519 519 (
520 520 "from rev 000000000000123123 also rev 000000000000",
521 521 "from rev url[000000000000123123] also rev url[000000000000]",
522 522 ["000000000000123123", "000000000000"]
523 523 ),
524 524 (
525 525 "this should-000 00",
526 526 "this should-000 00",
527 527 [""]
528 528 ),
529 529 (
530 530 "longtextffffffffff rev 123123123123",
531 531 "longtextffffffffff rev url[123123123123]",
532 532 ["123123123123"]
533 533 ),
534 534 (
535 535 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
536 536 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
537 537 ["ffffffffffffffffffffffffffffffffffffffffffffffffff"]
538 538 ),
539 539 (
540 540 "ffffffffffff some text traalaa",
541 541 "url[ffffffffffff] some text traalaa",
542 542 ["ffffffffffff"]
543 543 ),
544 544 (
545 545 """Multi line
546 546 123123123123
547 547 some text 000000000000
548 548 sometimes !
549 549 """,
550 550 """Multi line
551 551 url[123123123123]
552 552 some text url[000000000000]
553 553 sometimes !
554 554 """,
555 555 ["123123123123", "000000000000"]
556 556 )
557 557 ], ids=no_newline_id_generator)
558 558 def test_urlify_commits(sample, expected, commits):
559 559 def fake_url(self, *args, **kwargs):
560 560 return '/some-url'
561 561
562 562 expected = _quick_url(expected, commits=commits)
563 563
564 564 with mock.patch('rhodecode.lib.helpers.route_url', fake_url):
565 565 from rhodecode.lib.helpers import urlify_commits
566 566 assert urlify_commits(sample, 'repo_name') == expected
567 567
568 568
569 569 @pytest.mark.parametrize("sample, expected, url_", [
570 570 ("",
571 571 "",
572 572 ""),
573 573 ("https://svn.apache.org/repos",
574 574 "url[https://svn.apache.org/repos]",
575 575 "https://svn.apache.org/repos"),
576 576 ("http://svn.apache.org/repos",
577 577 "url[http://svn.apache.org/repos]",
578 578 "http://svn.apache.org/repos"),
579 579 ("from rev a also rev http://google.com",
580 580 "from rev a also rev url[http://google.com]",
581 581 "http://google.com"),
582 582 ("""Multi line
583 583 https://foo.bar.com
584 584 some text lalala""",
585 585 """Multi line
586 586 url[https://foo.bar.com]
587 587 some text lalala""",
588 588 "https://foo.bar.com")
589 589 ], ids=no_newline_id_generator)
590 590 def test_urlify_test(sample, expected, url_):
591 591 from rhodecode.lib.helpers import urlify_text
592 592 expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
593 593 assert urlify_text(sample) == expected
594 594
595 595
596 596 @pytest.mark.parametrize("test, expected", [
597 597 ("", None),
598 598 ("/_2", '2'),
599 599 ("_2", '2'),
600 600 ("/_2/", '2'),
601 601 ("_2/", '2'),
602 602
603 603 ("/_21", '21'),
604 604 ("_21", '21'),
605 605 ("/_21/", '21'),
606 606 ("_21/", '21'),
607 607
608 608 ("/_21/foobar", '21'),
609 609 ("_21/121", '21'),
610 610 ("/_21/_12", '21'),
611 611 ("_21/rc/foo", '21'),
612 612
613 613 ])
614 614 def test_get_repo_by_id(test, expected):
615 615 from rhodecode.model.repo import RepoModel
616 616 _test = RepoModel()._extract_id_from_repo_name(test)
617 617 assert _test == expected
618 618
619 619
620 620 def test_invalidation_context(baseapp):
621 621 repo_id = 9999
622 calls = [1, 2]
623 call_args = ('some-key',)
624 region = rc_cache.get_or_create_region('cache_repo_longterm')
622 625
623 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
624 repo_id, CacheKey.CACHE_TYPE_FEED)
625 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
626 repo_id=repo_id)
627 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
626 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
627 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
628 628
629 calls = [1, 2]
629 def cache_generator(_state_uid):
630 630
631 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
632 def _dummy_func(cache_key):
633 val = calls.pop(0)
634 return 'result:{}'.format(val)
631 @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
632 def _dummy_func(*args):
633 val = calls.pop(0)
634 return _state_uid, f'result:{val}'
635 635
636 inv_context_manager = rc_cache.InvalidationContext(
637 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
636 return _dummy_func
638 637
639 638 # 1st call, fresh caches
640 639 with inv_context_manager as invalidation_context:
641 should_invalidate = invalidation_context.should_invalidate()
640 cache_state_uid = invalidation_context.state_uid
641 cache_func = cache_generator(cache_state_uid)
642 previous_state_uid, result = cache_func(*call_args)
643
644 should_invalidate = previous_state_uid != cache_state_uid
642 645 if should_invalidate:
643 result = _dummy_func.refresh('some-key')
644 else:
645 result = _dummy_func('some-key')
646 _, result = cache_func.refresh(*call_args)
646 647
647 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
648 assert should_invalidate is True
648 assert should_invalidate is False # 1st call, we don't need to invalidate
649 649
650 650 assert 'result:1' == result
651 # should be cached so calling it twice will give the same result !
652 result = _dummy_func('some-key')
651 # should be already cached so calling it twice will give the same result!
652 _, result = cache_func(*call_args)
653 653 assert 'result:1' == result
654 654
655 655 # 2nd call, we create a new context manager, this should be now key aware, and
656 # return an active cache region
656 # return an active cache region from DB based on the same uid
657 657 with inv_context_manager as invalidation_context:
658 should_invalidate = invalidation_context.should_invalidate()
659 assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
660 assert should_invalidate is False
658 cache_state_uid = invalidation_context.state_uid
659 cache_func = cache_generator(cache_state_uid)
660 previous_state_uid, result = cache_func(*call_args)
661
662 should_invalidate = previous_state_uid != cache_state_uid
663 if should_invalidate:
664 _, result = cache_func.refresh(*call_args)
665
666 assert should_invalidate is False # 1st call, we don't need to invalidate
661 667
662 668 # Mark invalidation
663 CacheKey.set_invalidate(invalidation_namespace)
669 CacheKey.set_invalidate(repo_namespace_key)
664 670
665 671 # 3nd call, fresh caches
666 672 with inv_context_manager as invalidation_context:
667 should_invalidate = invalidation_context.should_invalidate()
673 cache_state_uid = invalidation_context.state_uid
674 cache_func = cache_generator(cache_state_uid)
675 previous_state_uid, result = cache_func(*call_args)
676
677 should_invalidate = previous_state_uid != cache_state_uid
668 678 if should_invalidate:
669 result = _dummy_func.refresh('some-key')
670 else:
671 result = _dummy_func('some-key')
679 _, result = cache_func.refresh(*call_args)
672 680
673 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
674 681 assert should_invalidate is True
675 682
676 683 assert 'result:2' == result
677 684
678 685 # cached again, same result
679 result = _dummy_func('some-key')
686 _, result = cache_func(*call_args)
680 687 assert 'result:2' == result
681 688
682 689
683 690 def test_invalidation_context_exception_in_compute(baseapp):
684 691 repo_id = 888
692 region = rc_cache.get_or_create_region('cache_repo_longterm')
685 693
686 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
687 repo_id, CacheKey.CACHE_TYPE_FEED)
688 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
689 repo_id=repo_id)
690 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
694 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
695 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
691 696
692 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
693 def _dummy_func(cache_key):
694 raise Exception('Error in cache func')
697 def cache_generator(_state_uid):
698 @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
699 def _dummy_func(*args):
700 raise Exception('Error in cache func')
701
702 return _dummy_func
695 703
696 704 with pytest.raises(Exception):
697 inv_context_manager = rc_cache.InvalidationContext(
698 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
699 705
700 706 # 1st call, fresh caches
701 707 with inv_context_manager as invalidation_context:
702 should_invalidate = invalidation_context.should_invalidate()
703 if should_invalidate:
704 _dummy_func.refresh('some-key-2')
705 else:
706 _dummy_func('some-key-2')
708 cache_state_uid = invalidation_context.state_uid
709 cache_func = cache_generator(cache_state_uid)
710 cache_func(1, 2, 3)
707 711
708 712
709 713 @pytest.mark.parametrize('execution_number', range(5))
710 714 def test_cache_invalidation_race_condition(execution_number, baseapp):
711 import time
712 715
713 716 repo_id = 777
714 717
715 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
716 repo_id, CacheKey.CACHE_TYPE_FEED)
717 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
718 repo_id=repo_id)
719 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
718 region = rc_cache.get_or_create_region('cache_repo_longterm')
719 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
720 720
721 721 @run_test_concurrently(25)
722 722 def test_create_and_delete_cache_keys():
723 723 time.sleep(0.2)
724 724
725 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
726 def _dummy_func(cache_key):
727 val = 'async'
728 return 'result:{}'.format(val)
725 def cache_generator(_state_uid):
729 726
730 inv_context_manager = rc_cache.InvalidationContext(
731 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
727 @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
728 def _dummy_func(*args):
729 return _state_uid, 'result:async'
730
731 return _dummy_func
732
733 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
732 734
733 735 # 1st call, fresh caches
734 736 with inv_context_manager as invalidation_context:
735 should_invalidate = invalidation_context.should_invalidate()
737 cache_state_uid = invalidation_context.state_uid
738 cache_func = cache_generator(cache_state_uid)
739 previous_state_uid, result = cache_func('doo')
740
741 should_invalidate = previous_state_uid != cache_state_uid
736 742 if should_invalidate:
737 _dummy_func.refresh('some-key-3')
738 else:
739 _dummy_func('some-key-3')
743 _, result = cache_func.refresh('doo')
740 744
741 745 # Mark invalidation
742 CacheKey.set_invalidate(invalidation_namespace)
746 CacheKey.set_invalidate(repo_namespace_key)
743 747
744 748 test_create_and_delete_cache_keys()
@@ -1,375 +1,373 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 Test suite for making push/pull operations, on specially modified INI files
22 22
23 23 .. important::
24 24
25 25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 29
30 30 import time
31 31 import logging
32 32
33 33 import pytest
34 34
35 35 from rhodecode.lib import rc_cache
36 36 from rhodecode.model.auth_token import AuthTokenModel
37 37 from rhodecode.model.db import Repository, UserIpMap, CacheKey
38 38 from rhodecode.model.meta import Session
39 39 from rhodecode.model.repo import RepoModel
40 40 from rhodecode.model.user import UserModel
41 41 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
42 42 from rhodecode.tests.utils import assert_message_in_log
43 43
44 44 from rhodecode.tests.vcs_operations import (
45 45 Command, _check_proper_clone, _check_proper_git_push,
46 46 _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
47 47
48 48
49 49 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
50 50 class TestVCSOperations(object):
51 51
52 52 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
53 53 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 54 stdout, stderr = Command('/tmp').execute(
55 55 'hg clone', clone_url, tmpdir.strpath)
56 56 _check_proper_clone(stdout, stderr, 'hg')
57 57
58 58 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
59 59 clone_url = rc_web_server.repo_clone_url(HG_REPO)
60 60 stdout, stderr = Command('/tmp').execute(
61 61 'hg clone --pull', clone_url, tmpdir.strpath)
62 62 _check_proper_clone(stdout, stderr, 'hg')
63 63
64 64 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
65 65 clone_url = rc_web_server.repo_clone_url(HG_REPO)
66 66 stdout, stderr = Command('/tmp').execute(
67 67 'hg clone --pull --stream', clone_url, tmpdir.strpath)
68 68 assert 'files to transfer,' in stdout
69 69 assert 'transferred 1.' in stdout
70 70 assert '114 files updated,' in stdout
71 71
72 72 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
73 73 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
74 74 cmd = Command('/tmp')
75 75 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
76 76 _check_proper_clone(stdout, stderr, 'git')
77 77 cmd.assert_returncode_success()
78 78
79 79 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
80 80 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
81 81 cmd = Command('/tmp')
82 82 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
83 83 _check_proper_clone(stdout, stderr, 'git')
84 84 cmd.assert_returncode_success()
85 85
86 86 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
87 87 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
88 88 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
89 89 stdout, stderr = Command('/tmp').execute(
90 90 'hg clone', clone_url, tmpdir.strpath)
91 91 _check_proper_clone(stdout, stderr, 'hg')
92 92
93 93 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
94 94 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
95 95 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
96 96 cmd = Command('/tmp')
97 97 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
98 98 _check_proper_clone(stdout, stderr, 'git')
99 99 cmd.assert_returncode_success()
100 100
101 101 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
102 102 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
103 103 stdout, stderr = Command('/tmp').execute(
104 104 'hg clone', clone_url, tmpdir.strpath)
105 105 _check_proper_clone(stdout, stderr, 'hg')
106 106
107 107 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
108 108 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
109 109 cmd = Command('/tmp')
110 110 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
111 111 _check_proper_clone(stdout, stderr, 'git')
112 112 cmd.assert_returncode_success()
113 113
114 114 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
115 115 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
116 116 cmd = Command('/tmp')
117 117 stdout, stderr = cmd.execute(
118 118 'git clone --depth=1', clone_url, tmpdir.strpath)
119 119
120 120 assert '' == stdout
121 121 assert 'Cloning into' in stderr
122 122 cmd.assert_returncode_success()
123 123
124 124 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
125 125 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
126 126 stdout, stderr = Command('/tmp').execute(
127 127 'hg clone', clone_url, tmpdir.strpath)
128 128 assert 'abort: authorization failed' in stderr
129 129
130 130 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
131 131 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
132 132 stdout, stderr = Command('/tmp').execute(
133 133 'git clone', clone_url, tmpdir.strpath)
134 134 assert 'fatal: Authentication failed' in stderr
135 135
136 136 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
137 137 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
138 138 stdout, stderr = Command('/tmp').execute(
139 139 'hg clone', clone_url, tmpdir.strpath)
140 140 assert 'HTTP Error 404: Not Found' in stderr
141 141
142 142 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
143 143 clone_url = rc_web_server.repo_clone_url(HG_REPO)
144 144 stdout, stderr = Command('/tmp').execute(
145 145 'git clone', clone_url, tmpdir.strpath)
146 146 assert 'not found' in stderr
147 147
148 148 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
149 149 clone_url = rc_web_server.repo_clone_url('trololo')
150 150 stdout, stderr = Command('/tmp').execute(
151 151 'hg clone', clone_url, tmpdir.strpath)
152 152 assert 'HTTP Error 404: Not Found' in stderr
153 153
154 154 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
155 155 clone_url = rc_web_server.repo_clone_url('trololo')
156 156 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
157 157 assert 'not found' in stderr
158 158
159 159 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
160 160 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
161 161 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
162 162 assert 'HTTP Error 404: Not Found' in stderr
163 163
164 164 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
165 165 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
166 166 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
167 167 assert 'not found' in stderr
168 168
169 169 def test_clone_existing_path_hg_not_in_database(
170 170 self, rc_web_server, tmpdir, fs_repo_only):
171 171
172 172 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
173 173 clone_url = rc_web_server.repo_clone_url(db_name)
174 174 stdout, stderr = Command('/tmp').execute(
175 175 'hg clone', clone_url, tmpdir.strpath)
176 176 assert 'HTTP Error 404: Not Found' in stderr
177 177
178 178 def test_clone_existing_path_git_not_in_database(
179 179 self, rc_web_server, tmpdir, fs_repo_only):
180 180 db_name = fs_repo_only('not-in-db-git', repo_type='git')
181 181 clone_url = rc_web_server.repo_clone_url(db_name)
182 182 stdout, stderr = Command('/tmp').execute(
183 183 'git clone', clone_url, tmpdir.strpath)
184 184 assert 'not found' in stderr
185 185
186 186 def test_clone_existing_path_hg_not_in_database_different_scm(
187 187 self, rc_web_server, tmpdir, fs_repo_only):
188 188 db_name = fs_repo_only('not-in-db-git', repo_type='git')
189 189 clone_url = rc_web_server.repo_clone_url(db_name)
190 190 stdout, stderr = Command('/tmp').execute(
191 191 'hg clone', clone_url, tmpdir.strpath)
192 192 assert 'HTTP Error 404: Not Found' in stderr
193 193
194 194 def test_clone_existing_path_git_not_in_database_different_scm(
195 195 self, rc_web_server, tmpdir, fs_repo_only):
196 196 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
197 197 clone_url = rc_web_server.repo_clone_url(db_name)
198 198 stdout, stderr = Command('/tmp').execute(
199 199 'git clone', clone_url, tmpdir.strpath)
200 200 assert 'not found' in stderr
201 201
202 202 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
203 203 repo = user_util.create_repo()
204 204 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
205 205
206 206 # Damage repo by removing it's folder
207 207 RepoModel()._delete_filesystem_repo(repo)
208 208
209 209 stdout, stderr = Command('/tmp').execute(
210 210 'hg clone', clone_url, tmpdir.strpath)
211 211 assert 'HTTP Error 404: Not Found' in stderr
212 212
213 213 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
214 214 repo = user_util.create_repo(repo_type='git')
215 215 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
216 216
217 217 # Damage repo by removing it's folder
218 218 RepoModel()._delete_filesystem_repo(repo)
219 219
220 220 stdout, stderr = Command('/tmp').execute(
221 221 'git clone', clone_url, tmpdir.strpath)
222 222 assert 'not found' in stderr
223 223
224 224 def test_push_new_file_hg(self, rc_web_server, tmpdir):
225 225 clone_url = rc_web_server.repo_clone_url(HG_REPO)
226 226 stdout, stderr = Command('/tmp').execute(
227 227 'hg clone', clone_url, tmpdir.strpath)
228 228
229 229 stdout, stderr = _add_files_and_push(
230 230 'hg', tmpdir.strpath, clone_url=clone_url)
231 231
232 232 assert 'pushing to' in stdout
233 233 assert 'size summary' in stdout
234 234
235 235 def test_push_new_file_git(self, rc_web_server, tmpdir):
236 236 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
237 237 stdout, stderr = Command('/tmp').execute(
238 238 'git clone', clone_url, tmpdir.strpath)
239 239
240 240 # commit some stuff into this repo
241 241 stdout, stderr = _add_files_and_push(
242 242 'git', tmpdir.strpath, clone_url=clone_url)
243 243
244 244 _check_proper_git_push(stdout, stderr)
245 245
246 246 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
247 247 hg_repo = Repository.get_by_repo_name(HG_REPO)
248 248
249 249 # init cache objects
250 250 CacheKey.delete_all_cache()
251 251 cache_namespace_uid = 'cache_push_test.{}'.format(hg_repo.repo_id)
252 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
253 repo_id=hg_repo.repo_id)
252 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)
254 253
255 inv_context_manager = rc_cache.InvalidationContext(
256 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
254 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
257 255
258 256 with inv_context_manager as invalidation_context:
259 257 # __enter__ will create and register cache objects
260 258 pass
261 259
262 260 # clone to init cache
263 261 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
264 262 stdout, stderr = Command('/tmp').execute(
265 263 'hg clone', clone_url, tmpdir.strpath)
266 264
267 265 cache_keys = hg_repo.cache_keys
268 266 assert cache_keys != []
269 267 for key in cache_keys:
270 268 assert key.cache_active is True
271 269
272 270 # PUSH that should trigger invalidation cache
273 271 stdout, stderr = _add_files_and_push(
274 272 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
275 273
276 274 # flush...
277 275 Session().commit()
278 276 hg_repo = Repository.get_by_repo_name(HG_REPO)
279 277 cache_keys = hg_repo.cache_keys
280 278 assert cache_keys != []
281 279 for key in cache_keys:
282 280 # keys should be marked as not active
283 281 assert key.cache_active is False
284 282
285 283 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
286 284 clone_url = rc_web_server.repo_clone_url(HG_REPO)
287 285 stdout, stderr = Command('/tmp').execute(
288 286 'hg clone', clone_url, tmpdir.strpath)
289 287
290 288 push_url = rc_web_server.repo_clone_url(
291 289 HG_REPO, user='bad', passwd='name')
292 290 stdout, stderr = _add_files_and_push(
293 291 'hg', tmpdir.strpath, clone_url=push_url)
294 292
295 293 assert 'abort: authorization failed' in stderr
296 294
297 295 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
298 296 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
299 297 stdout, stderr = Command('/tmp').execute(
300 298 'git clone', clone_url, tmpdir.strpath)
301 299
302 300 push_url = rc_web_server.repo_clone_url(
303 301 GIT_REPO, user='bad', passwd='name')
304 302 stdout, stderr = _add_files_and_push(
305 303 'git', tmpdir.strpath, clone_url=push_url)
306 304
307 305 assert 'fatal: Authentication failed' in stderr
308 306
309 307 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
310 308 clone_url = rc_web_server.repo_clone_url(HG_REPO)
311 309 stdout, stderr = Command('/tmp').execute(
312 310 'hg clone', clone_url, tmpdir.strpath)
313 311
314 312 stdout, stderr = _add_files_and_push(
315 313 'hg', tmpdir.strpath,
316 314 clone_url=rc_web_server.repo_clone_url('not-existing'))
317 315
318 316 assert 'HTTP Error 404: Not Found' in stderr
319 317
320 318 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
321 319 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
322 320 stdout, stderr = Command('/tmp').execute(
323 321 'git clone', clone_url, tmpdir.strpath)
324 322
325 323 stdout, stderr = _add_files_and_push(
326 324 'git', tmpdir.strpath,
327 325 clone_url=rc_web_server.repo_clone_url('not-existing'))
328 326
329 327 assert 'not found' in stderr
330 328
331 329 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
332 330 user_model = UserModel()
333 331 try:
334 332 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
335 333 Session().commit()
336 334 time.sleep(2)
337 335 clone_url = rc_web_server.repo_clone_url(HG_REPO)
338 336 stdout, stderr = Command('/tmp').execute(
339 337 'hg clone', clone_url, tmpdir.strpath)
340 338 assert 'abort: HTTP Error 403: Forbidden' in stderr
341 339 finally:
342 340 # release IP restrictions
343 341 for ip in UserIpMap.getAll():
344 342 UserIpMap.delete(ip.ip_id)
345 343 Session().commit()
346 344
347 345 time.sleep(2)
348 346
349 347 stdout, stderr = Command('/tmp').execute(
350 348 'hg clone', clone_url, tmpdir.strpath)
351 349 _check_proper_clone(stdout, stderr, 'hg')
352 350
353 351 def test_ip_restriction_git(self, rc_web_server, tmpdir):
354 352 user_model = UserModel()
355 353 try:
356 354 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
357 355 Session().commit()
358 356 time.sleep(2)
359 357 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
360 358 stdout, stderr = Command('/tmp').execute(
361 359 'git clone', clone_url, tmpdir.strpath)
362 360 msg = "The requested URL returned error: 403"
363 361 assert msg in stderr
364 362 finally:
365 363 # release IP restrictions
366 364 for ip in UserIpMap.getAll():
367 365 UserIpMap.delete(ip.ip_id)
368 366 Session().commit()
369 367
370 368 time.sleep(2)
371 369
372 370 cmd = Command('/tmp')
373 371 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
374 372 cmd.assert_returncode_success()
375 373 _check_proper_clone(stdout, stderr, 'git')
General Comments 0
You need to be logged in to leave comments. Login now