feature(caches): refactor how InvalidationContext works, fixes many issues with the previous solution...
super-admin
r5288:c652fe5b default
@@ -1,120 +1,119 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import threading
20 import threading
21
21
22 from dogpile.cache import register_backend
22 from dogpile.cache import register_backend
23
23
24 from . import region_meta
24 from . import region_meta
25 from .utils import (
25 from .utils import (
26 ActiveRegionCache,
26 ActiveRegionCache,
27 FreshRegionCache,
28 InvalidationContext,
27 InvalidationContext,
29 backend_key_generator,
28 backend_key_generator,
30 clear_cache_namespace,
29 clear_cache_namespace,
31 get_default_cache_settings,
30 get_default_cache_settings,
32 get_or_create_region,
31 get_or_create_region,
33 make_region,
32 make_region,
34 str2bool,
33 str2bool,
35 )
34 )
36
35
37 module_name = 'rhodecode'
36 module_name = 'rhodecode'
38
37
39 register_backend(
38 register_backend(
40 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
39 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
41 "LRUMemoryBackend")
40 "LRUMemoryBackend")
42
41
43 register_backend(
42 register_backend(
44 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
43 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
45 "FileNamespaceBackend")
44 "FileNamespaceBackend")
46
45
47 register_backend(
46 register_backend(
48 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
47 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
49 "RedisPickleBackend")
48 "RedisPickleBackend")
50
49
51 register_backend(
50 register_backend(
52 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
51 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
53 "RedisMsgPackBackend")
52 "RedisMsgPackBackend")
54
53
55
54
56 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
57
56
58
57
59 FILE_TREE_CACHE_VER = 'v5'
58 FILE_TREE_CACHE_VER = 'v5'
60 LICENSE_CACHE_VER = 'v3'
59 LICENSE_CACHE_VER = 'v3'
61 PERMISSIONS_CACHE_VER = 'v2'
60 PERMISSIONS_CACHE_VER = 'v2'
62
61
63 CLEAR_DELETE = 'delete'
62 CLEAR_DELETE = 'delete'
64 CLEAR_INVALIDATE = 'invalidate'
63 CLEAR_INVALIDATE = 'invalidate'
65
64
66
65
67 def async_creation_runner(cache, cache_key, creator, mutex):
66 def async_creation_runner(cache, cache_key, creator, mutex):
68
67
69 def runner():
68 def runner():
70 try:
69 try:
71 value = creator()
70 value = creator()
72 cache.set(cache_key, value)
71 cache.set(cache_key, value)
73 finally:
72 finally:
74 mutex.release()
73 mutex.release()
75
74
76 thread = threading.Thread(target=runner)
75 thread = threading.Thread(target=runner)
77 thread.start()
76 thread.start()
78
77
79
78
80 def configure_dogpile_cache(settings):
79 def configure_dogpile_cache(settings):
81 cache_dir = settings.get('cache_dir')
80 cache_dir = settings.get('cache_dir')
82 if cache_dir:
81 if cache_dir:
83 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
82 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
84
83
85 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
84 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
86
85
87 # inspect available namespaces
86 # inspect available namespaces
88 avail_regions = set()
87 avail_regions = set()
89 for key in rc_cache_data.keys():
88 for key in rc_cache_data.keys():
90 namespace_name = key.split('.', 1)[0]
89 namespace_name = key.split('.', 1)[0]
91 if namespace_name in avail_regions:
90 if namespace_name in avail_regions:
92 continue
91 continue
93
92
94 avail_regions.add(namespace_name)
93 avail_regions.add(namespace_name)
95 log.debug('dogpile: found following cache regions: %s', namespace_name)
94 log.debug('dogpile: found following cache regions: %s', namespace_name)
96
95
97 new_region = make_region(
96 new_region = make_region(
98 name=namespace_name,
97 name=namespace_name,
99 function_key_generator=None,
98 function_key_generator=None,
100 async_creation_runner=None
99 async_creation_runner=None
101 )
100 )
102
101
103 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
102 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
104 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
103 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
105
104
106 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
105 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
107 if async_creator:
106 if async_creator:
108 log.debug('configuring region %s with async creator', new_region)
107 log.debug('configuring region %s with async creator', new_region)
109 new_region.async_creation_runner = async_creation_runner
108 new_region.async_creation_runner = async_creation_runner
110
109
111 if log.isEnabledFor(logging.DEBUG):
110 if log.isEnabledFor(logging.DEBUG):
112 region_args = dict(backend=new_region.actual_backend,
111 region_args = dict(backend=new_region.actual_backend,
113 region_invalidator=new_region.region_invalidator.__class__)
112 region_invalidator=new_region.region_invalidator.__class__)
114 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
113 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
115
114
116 region_meta.dogpile_cache_regions[namespace_name] = new_region
115 region_meta.dogpile_cache_regions[namespace_name] = new_region
117
116
118
117
119 def includeme(config):
118 def includeme(config):
120 configure_dogpile_cache(config.registry.settings)
119 configure_dogpile_cache(config.registry.settings)
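configure_dogpile_cache() above registers the custom `dogpile.cache.rc.*` backends and builds one dogpile region per `rc_cache.<region>.*` settings group. A minimal sketch of how such a region could be declared and then fetched is shown below; the region name `cache_general`, the paths and the concrete values are illustrative assumptions, while the key layout follows the configure_from_config() prefix convention used in the code above.

    from rhodecode.lib.rc_cache import configure_dogpile_cache, get_or_create_region

    # hypothetical settings, normally parsed from the .ini file
    settings = {
        'cache_dir': '/tmp/rc_cache',
        'rc_cache.cache_general.backend': 'dogpile.cache.rc.file_namespace',
        'rc_cache.cache_general.expiration_time': '43200',
        'rc_cache.cache_general.arguments.filename': '/tmp/rc_cache/cache_general_db',
        # optional: re-compute expired values in a background thread
        'rc_cache.cache_general.async_creator': 'true',
    }

    configure_dogpile_cache(settings)

    # FileNamespaceBackend regions require an explicit namespace (one cache file per
    # namespace); the Redis-backed regions are returned as configured, without this step
    region = get_or_create_region('cache_general', region_namespace='repo_id:1')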
@@ -1,403 +1,358 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import functools
19 import functools
20 import logging
20 import logging
21 import os
21 import os
22 import threading
22 import threading
23 import time
23 import time
24
24
25 import decorator
25 import decorator
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27
27
28 import rhodecode
28 import rhodecode
29 from rhodecode.lib.hash_utils import sha1
29 from rhodecode.lib.hash_utils import sha1
30 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
32
32
33 from . import region_meta
33 from . import region_meta
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
38 def isCython(func):
38 def isCython(func):
39 """
39 """
40 Private helper that checks if a function is a cython function.
40 Private helper that checks if a function is a cython function.
41 """
41 """
42 return func.__class__.__name__ == 'cython_function_or_method'
42 return func.__class__.__name__ == 'cython_function_or_method'
43
43
44
44
45 class RhodeCodeCacheRegion(CacheRegion):
45 class RhodeCodeCacheRegion(CacheRegion):
46
46
47 def __repr__(self):
47 def __repr__(self):
48 return f'{self.__class__}(name={self.name})'
48 return f'{self.__class__}(name={self.name})'
49
49
50 def conditional_cache_on_arguments(
50 def conditional_cache_on_arguments(
51 self, namespace=None,
51 self, namespace=None,
52 expiration_time=None,
52 expiration_time=None,
53 should_cache_fn=None,
53 should_cache_fn=None,
54 to_str=str,
54 to_str=str,
55 function_key_generator=None,
55 function_key_generator=None,
56 condition=True):
56 condition=True):
57 """
57 """
58 Custom conditional decorator that will not touch any dogpile internals if
58 Custom conditional decorator that will not touch any dogpile internals if
59 the condition isn't met. This works a bit differently from should_cache_fn,
59 the condition isn't met. This works a bit differently from should_cache_fn,
60 and it's faster in cases where we never want to compute cached values.
60 and it's faster in cases where we never want to compute cached values.
61 """
61 """
62 expiration_time_is_callable = callable(expiration_time)
62 expiration_time_is_callable = callable(expiration_time)
63 if not namespace:
63 if not namespace:
64 namespace = getattr(self, '_default_namespace', None)
64 namespace = getattr(self, '_default_namespace', None)
65
65
66 if function_key_generator is None:
66 if function_key_generator is None:
67 function_key_generator = self.function_key_generator
67 function_key_generator = self.function_key_generator
68
68
69 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
69 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
70
70
71 if not condition:
71 if not condition:
72 log.debug('Calling un-cached method:%s', user_func.__name__)
72 log.debug('Calling un-cached method:%s', user_func.__name__)
73 start = time.time()
73 start = time.time()
74 result = user_func(*arg, **kw)
74 result = user_func(*arg, **kw)
75 total = time.time() - start
75 total = time.time() - start
76 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
76 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
77 return result
77 return result
78
78
79 key = func_key_generator(*arg, **kw)
79 key = func_key_generator(*arg, **kw)
80
80
81 timeout = expiration_time() if expiration_time_is_callable \
81 timeout = expiration_time() if expiration_time_is_callable \
82 else expiration_time
82 else expiration_time
83
83
84 log.debug('Calling cached method:`%s`', user_func.__name__)
84 log.debug('Calling cached method:`%s`', user_func.__name__)
85 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
85 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
86
86
87 def cache_decorator(user_func):
87 def cache_decorator(user_func):
88 if to_str is str:
88 if to_str is str:
89 # backwards compatible
89 # backwards compatible
90 key_generator = function_key_generator(namespace, user_func)
90 key_generator = function_key_generator(namespace, user_func)
91 else:
91 else:
92 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
92 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
93
93
94 def refresh(*arg, **kw):
94 def refresh(*arg, **kw):
95 """
95 """
96 Like invalidate, but regenerates the value instead
96 Like invalidate, but regenerates the value instead
97 """
97 """
98 key = key_generator(*arg, **kw)
98 key = key_generator(*arg, **kw)
99 value = user_func(*arg, **kw)
99 value = user_func(*arg, **kw)
100 self.set(key, value)
100 self.set(key, value)
101 return value
101 return value
102
102
103 def invalidate(*arg, **kw):
103 def invalidate(*arg, **kw):
104 key = key_generator(*arg, **kw)
104 key = key_generator(*arg, **kw)
105 self.delete(key)
105 self.delete(key)
106
106
107 def set_(value, *arg, **kw):
107 def set_(value, *arg, **kw):
108 key = key_generator(*arg, **kw)
108 key = key_generator(*arg, **kw)
109 self.set(key, value)
109 self.set(key, value)
110
110
111 def get(*arg, **kw):
111 def get(*arg, **kw):
112 key = key_generator(*arg, **kw)
112 key = key_generator(*arg, **kw)
113 return self.get(key)
113 return self.get(key)
114
114
115 user_func.set = set_
115 user_func.set = set_
116 user_func.invalidate = invalidate
116 user_func.invalidate = invalidate
117 user_func.get = get
117 user_func.get = get
118 user_func.refresh = refresh
118 user_func.refresh = refresh
119 user_func.key_generator = key_generator
119 user_func.key_generator = key_generator
120 user_func.original = user_func
120 user_func.original = user_func
121
121
122 # Use `decorate` to preserve the signature of :param:`user_func`.
122 # Use `decorate` to preserve the signature of :param:`user_func`.
123 return decorator.decorate(user_func, functools.partial(
123 return decorator.decorate(user_func, functools.partial(
124 get_or_create_for_user_func, key_generator))
124 get_or_create_for_user_func, key_generator))
125
125
126 return cache_decorator
126 return cache_decorator
127
127
128
128
129 def make_region(*arg, **kw):
129 def make_region(*arg, **kw):
130 return RhodeCodeCacheRegion(*arg, **kw)
130 return RhodeCodeCacheRegion(*arg, **kw)
131
131
132
132
133 def get_default_cache_settings(settings, prefixes=None):
133 def get_default_cache_settings(settings, prefixes=None):
134 prefixes = prefixes or []
134 prefixes = prefixes or []
135 cache_settings = {}
135 cache_settings = {}
136 for key in settings.keys():
136 for key in settings.keys():
137 for prefix in prefixes:
137 for prefix in prefixes:
138 if key.startswith(prefix):
138 if key.startswith(prefix):
139 name = key.split(prefix)[1].strip()
139 name = key.split(prefix)[1].strip()
140 val = settings[key]
140 val = settings[key]
141 if isinstance(val, str):
141 if isinstance(val, str):
142 val = val.strip()
142 val = val.strip()
143 cache_settings[name] = val
143 cache_settings[name] = val
144 return cache_settings
144 return cache_settings
145
145
146
146
147 def compute_key_from_params(*args):
147 def compute_key_from_params(*args):
148 """
148 """
149 Helper to compute key from given params to be used in cache manager
149 Helper to compute key from given params to be used in cache manager
150 """
150 """
151 return sha1(safe_bytes("_".join(map(str, args))))
151 return sha1(safe_bytes("_".join(map(str, args))))
152
152
153
153
154 def custom_key_generator(backend, namespace, fn):
154 def custom_key_generator(backend, namespace, fn):
155 func_name = fn.__name__
155 func_name = fn.__name__
156
156
157 def generate_key(*args):
157 def generate_key(*args):
158 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
158 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
159 namespace_pref = namespace or 'default_namespace'
159 namespace_pref = namespace or 'default_namespace'
160 arg_key = compute_key_from_params(*args)
160 arg_key = compute_key_from_params(*args)
161 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
161 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
162
162
163 return final_key
163 return final_key
164
164
165 return generate_key
165 return generate_key
166
166
167
167
168 def backend_key_generator(backend):
168 def backend_key_generator(backend):
169 """
169 """
170 Special wrapper that also sends over the backend to the key generator
170 Special wrapper that also sends over the backend to the key generator
171 """
171 """
172 def wrapper(namespace, fn):
172 def wrapper(namespace, fn):
173 return custom_key_generator(backend, namespace, fn)
173 return custom_key_generator(backend, namespace, fn)
174 return wrapper
174 return wrapper
175
175
176
176
177 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
177 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
178 from .backends import FileNamespaceBackend
178 from .backends import FileNamespaceBackend
179 from . import async_creation_runner
179 from . import async_creation_runner
180
180
181 region_obj = region_meta.dogpile_cache_regions.get(region_name)
181 region_obj = region_meta.dogpile_cache_regions.get(region_name)
182 if not region_obj:
182 if not region_obj:
183 reg_keys = list(region_meta.dogpile_cache_regions.keys())
183 reg_keys = list(region_meta.dogpile_cache_regions.keys())
184 raise OSError(f'Region `{region_name}` not found in configured regions: {reg_keys}.')
184 raise OSError(f'Region `{region_name}` not found in configured regions: {reg_keys}.')
185
185
186 region_uid_name = f'{region_name}:{region_namespace}'
186 region_uid_name = f'{region_name}:{region_namespace}'
187
187
188 # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
188 # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
189 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
189 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
190 if not region_namespace:
190 if not region_namespace:
191 raise ValueError(f'{FileNamespaceBackend} requires the region_namespace param to be specified')
191 raise ValueError(f'{FileNamespaceBackend} requires the region_namespace param to be specified')
192
192
193 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
193 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
194 if region_exist:
194 if region_exist:
195 log.debug('Using already configured region: %s', region_namespace)
195 log.debug('Using already configured region: %s', region_namespace)
196 return region_exist
196 return region_exist
197
197
198 expiration_time = region_obj.expiration_time
198 expiration_time = region_obj.expiration_time
199
199
200 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
200 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
201 namespace_cache_dir = cache_dir
201 namespace_cache_dir = cache_dir
202
202
203 # we default the namespace_cache_dir to our default cache dir.
203 # we default the namespace_cache_dir to our default cache dir.
204 # however, if this backend is configured with filename= param, we prioritize that
204 # however, if this backend is configured with filename= param, we prioritize that
205 # so all caches within that particular region, even those namespaced end up in the same path
205 # so all caches within that particular region, even those namespaced end up in the same path
206 if region_obj.actual_backend.filename:
206 if region_obj.actual_backend.filename:
207 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
207 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
208
208
209 if not os.path.isdir(namespace_cache_dir):
209 if not os.path.isdir(namespace_cache_dir):
210 os.makedirs(namespace_cache_dir)
210 os.makedirs(namespace_cache_dir)
211 new_region = make_region(
211 new_region = make_region(
212 name=region_uid_name,
212 name=region_uid_name,
213 function_key_generator=backend_key_generator(region_obj.actual_backend)
213 function_key_generator=backend_key_generator(region_obj.actual_backend)
214 )
214 )
215
215
216 namespace_filename = os.path.join(
216 namespace_filename = os.path.join(
217 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
217 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
218 # special type that allows 1db per namespace
218 # special type that allows 1db per namespace
219 new_region.configure(
219 new_region.configure(
220 backend='dogpile.cache.rc.file_namespace',
220 backend='dogpile.cache.rc.file_namespace',
221 expiration_time=expiration_time,
221 expiration_time=expiration_time,
222 arguments={"filename": namespace_filename}
222 arguments={"filename": namespace_filename}
223 )
223 )
224
224
225 # create and save in region caches
225 # create and save in region caches
226 log.debug('configuring new region: %s', region_uid_name)
226 log.debug('configuring new region: %s', region_uid_name)
227 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
227 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
228
228
229 region_obj._default_namespace = region_namespace
229 region_obj._default_namespace = region_namespace
230 if use_async_runner:
230 if use_async_runner:
231 region_obj.async_creation_runner = async_creation_runner
231 region_obj.async_creation_runner = async_creation_runner
232 return region_obj
232 return region_obj
233
233
234
234
235 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
235 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
236 from . import CLEAR_DELETE, CLEAR_INVALIDATE
236 from . import CLEAR_DELETE, CLEAR_INVALIDATE
237
237
238 if not isinstance(cache_region, RhodeCodeCacheRegion):
238 if not isinstance(cache_region, RhodeCodeCacheRegion):
239 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
239 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
240 log.debug('clearing cache region: %s [prefix:%s] with method=%s',
240 log.debug('clearing cache region: %s [prefix:%s] with method=%s',
241 cache_region, cache_namespace_uid, method)
241 cache_region, cache_namespace_uid, method)
242
242
243 num_affected_keys = 0
243 num_affected_keys = 0
244
244
245 if method == CLEAR_INVALIDATE:
245 if method == CLEAR_INVALIDATE:
246 # NOTE: The CacheRegion.invalidate() method’s default mode of
246 # NOTE: The CacheRegion.invalidate() method’s default mode of
247 # operation is to set a timestamp local to this CacheRegion in this Python process only.
247 # operation is to set a timestamp local to this CacheRegion in this Python process only.
248 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
248 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
249 cache_region.invalidate(hard=True)
249 cache_region.invalidate(hard=True)
250
250
251 if method == CLEAR_DELETE:
251 if method == CLEAR_DELETE:
252 num_affected_keys = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)
252 num_affected_keys = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)
253
253
254 return num_affected_keys
254 return num_affected_keys
255
255
256
256
257 class ActiveRegionCache(object):
257 class ActiveRegionCache(object):
258 def __init__(self, context, cache_data):
258 def __init__(self, context, cache_data: dict):
259 self.context = context
259 self.context = context
260 self.cache_data = cache_data
260 self.cache_data = cache_data
261
261
262 def should_invalidate(self):
262 @property
263 return False
263 def state_uid(self) -> str:
264
264 return self.cache_data['cache_state_uid']
265
266 class FreshRegionCache(object):
267 def __init__(self, context, cache_data):
268 self.context = context
269 self.cache_data = cache_data
270
271 def should_invalidate(self):
272 return True
273
265
274
266
275 class InvalidationContext(object):
267 class InvalidationContext(object):
276 """
268 """
277 usage::
269 usage::
278
270
279 from rhodecode.lib import rc_cache
271 from rhodecode.lib import rc_cache
280
272
281 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
273 repo_namespace_key = 'some-cache-for-repo-id-100'
282 region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)
274 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
275
276 def cache_generator(_state_uid):
283
277
284 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
278 @region.conditional_cache_on_arguments(namespace='some-common-namespace-100')
285 def heavy_compute(cache_name, param1, param2):
279 def _dummy_func(*args):
286 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
280 # compute heavy function
287
281 return _state_uid, 'result'
288 # invalidation namespace is shared namespace key for all process caches
289 # we use it to send a global signal
290 invalidation_namespace = 'repo_cache:1'
291
282
292 inv_context_manager = rc_cache.InvalidationContext(
283 return _dummy_func
293 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
284
294 with inv_context_manager as invalidation_context:
285 with inv_context_manager as invalidation_context:
295 args = ('one', 'two')
286 cache_state_uid = invalidation_context.state_uid
296 # re-compute and store cache if we get invalidate signal
287 cache_func = cache_generator(cache_state_uid)
297 if invalidation_context.should_invalidate():
288 previous_state_uid, result = cache_func(*call_args)
298 result = heavy_compute.refresh(*args)
299 else:
300 result = heavy_compute(*args)
301
289
302 compute_time = inv_context_manager.compute_time
290 should_invalidate = previous_state_uid != cache_state_uid
303 log.debug('result computed in %.4fs', compute_time)
291 if should_invalidate:
292 _, result = cache_func.refresh(*call_args)
304
293
305 # To send global invalidation signal, simply run
294 # To send global invalidation signal, simply run
306 CacheKey.set_invalidate(invalidation_namespace)
295 CacheKey.set_invalidate(repo_namespace_key)
307
296
308 """
297 """
309
298
310 def __repr__(self):
299 def __repr__(self):
311 return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'
300 return f'<InvalidationContext:{self.cache_key}>'
312
301
313 def __init__(self, uid, invalidation_namespace='',
302 def __init__(self, key, raise_exception=False, thread_scoped=None):
314 raise_exception=False, thread_scoped=None):
303 self.cache_key = key
315 self.uid = uid
304
316 self.invalidation_namespace = invalidation_namespace
317 self.raise_exception = raise_exception
305 self.raise_exception = raise_exception
318 self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
306 self.proc_id = rhodecode.ConfigGet().get_str('instance_id') or 'DEFAULT'
319 self.thread_id = 'global'
307 self.thread_id = 'global'
320
308
321 if thread_scoped is None:
309 if thread_scoped is None:
322 # if we set "default" we can override this via .ini settings
310 # if we set "default" we can override this via .ini settings
323 thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')
311 thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')
324
312
325 # Append the thread id to the cache key if this invalidation context
313 # Append the thread id to the cache key if this invalidation context
326 # should be scoped to the current thread.
314 # should be scoped to the current thread.
327 if thread_scoped is True:
315 if thread_scoped is True:
328 self.thread_id = threading.current_thread().ident
316 self.thread_id = threading.current_thread().ident
329
317
330 self.cache_key = compute_key_from_params(uid)
318 self.proc_key = f'proc:{self.proc_id}|thread:{self.thread_id}|key:{self.cache_key}'
331 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
332 self.proc_id, self.thread_id, self.cache_key)
333 self.proc_key = f'proc:{self.proc_id}'
334 self.compute_time = 0
319 self.compute_time = 0
335
320
336 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
321 def get_or_create_cache_obj(self):
337 from rhodecode.model.db import CacheKey
322 from rhodecode.model.db import CacheKey, Session, IntegrityError
338
323
339 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
324 cache_obj = CacheKey.get_active_cache(self.cache_key)
340 # fetch all cache keys for this namespace and convert them to a map to find if we
341 # have specific cache_key object registered. We do this because we want to have
342 # all consistent cache_state_uid for newly registered objects
343 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
344 cache_obj = cache_obj_map.get(self.cache_key)
345 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
325 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
346
326
347 if not cache_obj:
327 if not cache_obj:
348 new_cache_args = invalidation_namespace
328 # generate new UID for non-existing cache object
349 first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
329 cache_state_uid = CacheKey.generate_new_state_uid()
350 cache_state_uid = None
330 cache_obj = CacheKey(self.cache_key, cache_args=f'repo_state:{self._start_time}',
351 if first_cache_obj:
331 cache_state_uid=cache_state_uid, cache_active=True)
352 cache_state_uid = first_cache_obj.cache_state_uid
332 try:
353 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
333 Session().add(cache_obj)
354 cache_state_uid=cache_state_uid)
334 Session().commit()
355
335 except IntegrityError:
336 # if we catch integrity error, it means we inserted this object
337 # the assumption is that this is really an edge race-condition case and
338 # it's safe to skip it
339 Session().rollback()
340 except Exception:
341 log.exception('Failed to commit on cache key update')
342 Session().rollback()
343 if self.raise_exception:
344 raise
356 return cache_obj
345 return cache_obj
357
346
358 def __enter__(self):
347 def __enter__(self):
359 """
348 log.debug('Entering cache invalidation check context: %s', self)
360 Test if current object is valid, and return CacheRegion function
361 that does invalidation and calculation
362 """
363 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
364 # register or get a new key based on uid
365 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
366 cache_data = self.cache_obj.get_dict()
367 self._start_time = time.time()
349 self._start_time = time.time()
368 if self.cache_obj.cache_active:
369 # means our cache obj is existing and marked as it's
370 # cache is not outdated, we return ActiveRegionCache
371 self.skip_cache_active_change = True
372
350
373 return ActiveRegionCache(context=self, cache_data=cache_data)
351 self.cache_obj = self.get_or_create_cache_obj()
352 cache_data = self.cache_obj.get_dict()
374
353
375 # the key is either not existing or set to False, we return
354 return ActiveRegionCache(context=self, cache_data=cache_data)
376 # the real invalidator which re-computes value. We additionally set
377 # the flag to actually update the Database objects
378 self.skip_cache_active_change = False
379 return FreshRegionCache(context=self, cache_data=cache_data)
380
355
381 def __exit__(self, exc_type, exc_val, exc_tb):
356 def __exit__(self, exc_type, exc_val, exc_tb):
382 from rhodecode.model.db import IntegrityError, Session
383
384 # save compute time
357 # save compute time
385 self.compute_time = time.time() - self._start_time
358 self.compute_time = time.time() - self._start_time
386
387 if self.skip_cache_active_change:
388 return
389
390 try:
391 self.cache_obj.cache_active = True
392 Session().add(self.cache_obj)
393 Session().commit()
394 except IntegrityError:
395 # if we catch integrity error, it means we inserted this object
396 # the assumption is that this is really an edge race-condition case and
397 # it's safe to skip it
398 Session().rollback()
399 except Exception:
400 log.exception('Failed to commit on cache key update')
401 Session().rollback()
402 if self.raise_exception:
403 raise
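The usage docstring above is interleaved with the removed version in this diff; re-assembled, the new InvalidationContext flow reads roughly as in the sketch below. The region name `cache_repo`, the namespace strings and the body of `_dummy_func` are illustrative assumptions; the `key=` argument, `state_uid`, the `refresh()` helper and `CacheKey.set_invalidate()` come from the code and docstring shown above.

    from rhodecode.lib import rc_cache
    from rhodecode.model.db import CacheKey

    repo_namespace_key = 'some-cache-for-repo-id-100'  # shared per-repo signal key
    region = rc_cache.get_or_create_region('cache_repo', 'some-common-namespace-100')

    def cache_generator(_state_uid):

        @region.conditional_cache_on_arguments(namespace='some-common-namespace-100')
        def _dummy_func(*args):
            # compute the heavy value here; the state uid seen at compute time
            # is stored alongside the result
            return _state_uid, 'result'

        return _dummy_func

    inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
    with inv_context_manager as invalidation_context:
        # state uid currently registered in the CacheKey table for this key
        cache_state_uid = invalidation_context.state_uid
        cache_func = cache_generator(cache_state_uid)
        previous_state_uid, result = cache_func('some-arg')

        # a different stored uid means another process signalled invalidation,
        # so recompute and overwrite the cached value
        if previous_state_uid != cache_state_uid:
            _, result = cache_func.refresh('some-arg')

    # any process can send the global invalidation signal with:
    CacheKey.set_invalidate(repo_namespace_key)

Compared to the removed implementation, the context manager no longer flips cache_active flags on __exit__; whether the cached value is stale is decided purely by comparing the stored cache_state_uid with the current one.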
@@ -1,5877 +1,5886 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Database Models for RhodeCode Enterprise
20 Database Models for RhodeCode Enterprise
21 """
21 """
22
22
23 import re
23 import re
24 import os
24 import os
25 import time
25 import time
26 import string
26 import string
27 import logging
27 import logging
28 import datetime
28 import datetime
29 import uuid
29 import uuid
30 import warnings
30 import warnings
31 import ipaddress
31 import ipaddress
32 import functools
32 import functools
33 import traceback
33 import traceback
34 import collections
34 import collections
35
35
36 from sqlalchemy import (
36 from sqlalchemy import (
37 or_, and_, not_, func, cast, TypeDecorator, event, select,
37 or_, and_, not_, func, cast, TypeDecorator, event, select,
38 true, false, null,
38 true, false, null,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType, BigInteger)
41 Text, Float, PickleType, BigInteger)
42 from sqlalchemy.sql.expression import case
42 from sqlalchemy.sql.expression import case
43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
46 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # pragma: no cover
48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from zope.cachedescriptors.property import Lazy as LazyProperty
50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
52 from webhelpers2.text import remove_formatting
52 from webhelpers2.text import remove_formatting
53
53
54 from rhodecode.lib.str_utils import safe_bytes
54 from rhodecode.lib.str_utils import safe_bytes
55 from rhodecode.translation import _
55 from rhodecode.translation import _
56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
57 from rhodecode.lib.vcs.backends.base import (
57 from rhodecode.lib.vcs.backends.base import (
58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
59 from rhodecode.lib.utils2 import (
59 from rhodecode.lib.utils2 import (
60 str2bool, safe_str, get_commit_safe, sha1_safe,
60 str2bool, safe_str, get_commit_safe, sha1_safe,
61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
63 from rhodecode.lib.jsonalchemy import (
63 from rhodecode.lib.jsonalchemy import (
64 MutationObj, MutationList, JsonType, JsonRaw)
64 MutationObj, MutationList, JsonType, JsonRaw)
65 from rhodecode.lib.hash_utils import sha1
65 from rhodecode.lib.hash_utils import sha1
66 from rhodecode.lib import ext_json
66 from rhodecode.lib import ext_json
67 from rhodecode.lib import enc_utils
67 from rhodecode.lib import enc_utils
68 from rhodecode.lib.ext_json import json, str_json
68 from rhodecode.lib.ext_json import json, str_json
69 from rhodecode.lib.caching_query import FromCache
69 from rhodecode.lib.caching_query import FromCache
70 from rhodecode.lib.exceptions import (
70 from rhodecode.lib.exceptions import (
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 from rhodecode.model.meta import Base, Session
72 from rhodecode.model.meta import Base, Session
73
73
74 URL_SEP = '/'
74 URL_SEP = '/'
75 log = logging.getLogger(__name__)
75 log = logging.getLogger(__name__)
76
76
77 # =============================================================================
77 # =============================================================================
78 # BASE CLASSES
78 # BASE CLASSES
79 # =============================================================================
79 # =============================================================================
80
80
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 # beaker.session.secret if first is not set.
82 # beaker.session.secret if first is not set.
83 # and initialized at environment.py
83 # and initialized at environment.py
84 ENCRYPTION_KEY: bytes = b''
84 ENCRYPTION_KEY: bytes = b''
85
85
86 # used to sort permissions by types, '#' used here is not allowed to be in
86 # used to sort permissions by types, '#' used here is not allowed to be in
87 # usernames, and it's very early in sorted string.printable table.
87 # usernames, and it's very early in sorted string.printable table.
88 PERMISSION_TYPE_SORT = {
88 PERMISSION_TYPE_SORT = {
89 'admin': '####',
89 'admin': '####',
90 'write': '###',
90 'write': '###',
91 'read': '##',
91 'read': '##',
92 'none': '#',
92 'none': '#',
93 }
93 }
94
94
95
95
96 def display_user_sort(obj):
96 def display_user_sort(obj):
97 """
97 """
98 Sort function used to sort permissions in .permissions() function of
98 Sort function used to sort permissions in .permissions() function of
99 Repository, RepoGroup, UserGroup. Also it puts the default user in front
99 Repository, RepoGroup, UserGroup. Also it puts the default user in front
100 of all other resources
100 of all other resources
101 """
101 """
102
102
103 if obj.username == User.DEFAULT_USER:
103 if obj.username == User.DEFAULT_USER:
104 return '#####'
104 return '#####'
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 extra_sort_num = '1' # default
106 extra_sort_num = '1' # default
107
107
108 # NOTE(dan): inactive duplicates go last
108 # NOTE(dan): inactive duplicates go last
109 if getattr(obj, 'duplicate_perm', None):
109 if getattr(obj, 'duplicate_perm', None):
110 extra_sort_num = '9'
110 extra_sort_num = '9'
111 return prefix + extra_sort_num + obj.username
111 return prefix + extra_sort_num + obj.username
112
112
113
113
114 def display_user_group_sort(obj):
114 def display_user_group_sort(obj):
115 """
115 """
116 Sort function used to sort permissions in .permissions() function of
116 Sort function used to sort permissions in .permissions() function of
117 Repository, RepoGroup, UserGroup. Also it puts the default user in front
117 Repository, RepoGroup, UserGroup. Also it puts the default user in front
118 of all other resources
118 of all other resources
119 """
119 """
120
120
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
122 return prefix + obj.users_group_name
122 return prefix + obj.users_group_name
123
123
124
124
125 def _hash_key(k):
125 def _hash_key(k):
126 return sha1_safe(k)
126 return sha1_safe(k)
127
127
128
128
129 def in_filter_generator(qry, items, limit=500):
129 def in_filter_generator(qry, items, limit=500):
130 """
130 """
131 Splits IN() into multiple with OR
131 Splits IN() into multiple with OR
132 e.g.::
132 e.g.::
133 cnt = Repository.query().filter(
133 cnt = Repository.query().filter(
134 or_(
134 or_(
135 *in_filter_generator(Repository.repo_id, range(100000))
135 *in_filter_generator(Repository.repo_id, range(100000))
136 )).count()
136 )).count()
137 """
137 """
138 if not items:
138 if not items:
139 # empty list will cause empty query which might cause security issues
139 # empty list will cause empty query which might cause security issues
140 # this can lead to hidden unpleasant results
140 # this can lead to hidden unpleasant results
141 items = [-1]
141 items = [-1]
142
142
143 parts = []
143 parts = []
144 for chunk in range(0, len(items), limit):
144 for chunk in range(0, len(items), limit):
145 parts.append(
145 parts.append(
146 qry.in_(items[chunk: chunk + limit])
146 qry.in_(items[chunk: chunk + limit])
147 )
147 )
148
148
149 return parts
149 return parts
150
150
151
151
152 base_table_args = {
152 base_table_args = {
153 'extend_existing': True,
153 'extend_existing': True,
154 'mysql_engine': 'InnoDB',
154 'mysql_engine': 'InnoDB',
155 'mysql_charset': 'utf8',
155 'mysql_charset': 'utf8',
156 'sqlite_autoincrement': True
156 'sqlite_autoincrement': True
157 }
157 }
158
158
159
159
160 class EncryptedTextValue(TypeDecorator):
160 class EncryptedTextValue(TypeDecorator):
161 """
161 """
162 Special column for encrypted long text data, use like::
162 Special column for encrypted long text data, use like::
163
163
164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
165
165
166 This column is intelligent: if the value is in unencrypted form it returns
166 This column is intelligent: if the value is in unencrypted form it returns
167 the unencrypted form, but on save it always encrypts
167 the unencrypted form, but on save it always encrypts
168 """
168 """
169 cache_ok = True
169 cache_ok = True
170 impl = Text
170 impl = Text
171
171
172 def process_bind_param(self, value, dialect):
172 def process_bind_param(self, value, dialect):
173 """
173 """
174 Setter for storing value
174 Setter for storing value
175 """
175 """
176 import rhodecode
176 import rhodecode
177 if not value:
177 if not value:
178 return value
178 return value
179
179
180 # protect against double encrypting if values is already encrypted
180 # protect against double encrypting if values is already encrypted
181 if value.startswith('enc$aes$') \
181 if value.startswith('enc$aes$') \
182 or value.startswith('enc$aes_hmac$') \
182 or value.startswith('enc$aes_hmac$') \
183 or value.startswith('enc2$'):
183 or value.startswith('enc2$'):
184 raise ValueError('value needs to be in unencrypted format, '
184 raise ValueError('value needs to be in unencrypted format, '
185 'ie. not starting with enc$ or enc2$')
185 'ie. not starting with enc$ or enc2$')
186
186
187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
188 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
188 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
189 return safe_str(bytes_val)
189 return safe_str(bytes_val)
190
190
191 def process_result_value(self, value, dialect):
191 def process_result_value(self, value, dialect):
192 """
192 """
193 Getter for retrieving value
193 Getter for retrieving value
194 """
194 """
195
195
196 import rhodecode
196 import rhodecode
197 if not value:
197 if not value:
198 return value
198 return value
199
199
200 enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
200 enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
201
201
202 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
202 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
203
203
204 return safe_str(bytes_val)
204 return safe_str(bytes_val)
205
205
206
206
207 class BaseModel(object):
207 class BaseModel(object):
208 """
208 """
209 Base Model for all classes
209 Base Model for all classes
210 """
210 """
211
211
212 @classmethod
212 @classmethod
213 def _get_keys(cls):
213 def _get_keys(cls):
214 """return column names for this model """
214 """return column names for this model """
215 return class_mapper(cls).c.keys()
215 return class_mapper(cls).c.keys()
216
216
217 def get_dict(self):
217 def get_dict(self):
218 """
218 """
219 return dict with keys and values corresponding
219 return dict with keys and values corresponding
220 to this model data """
220 to this model data """
221
221
222 d = {}
222 d = {}
223 for k in self._get_keys():
223 for k in self._get_keys():
224 d[k] = getattr(self, k)
224 d[k] = getattr(self, k)
225
225
226 # also use __json__() if present to get additional fields
226 # also use __json__() if present to get additional fields
227 _json_attr = getattr(self, '__json__', None)
227 _json_attr = getattr(self, '__json__', None)
228 if _json_attr:
228 if _json_attr:
229 # update with attributes from __json__
229 # update with attributes from __json__
230 if callable(_json_attr):
230 if callable(_json_attr):
231 _json_attr = _json_attr()
231 _json_attr = _json_attr()
232 for k, val in _json_attr.items():
232 for k, val in _json_attr.items():
233 d[k] = val
233 d[k] = val
234 return d
234 return d
235
235
236 def get_appstruct(self):
236 def get_appstruct(self):
237 """return list with keys and values tuples corresponding
237 """return list with keys and values tuples corresponding
238 to this model data """
238 to this model data """
239
239
240 lst = []
240 lst = []
241 for k in self._get_keys():
241 for k in self._get_keys():
242 lst.append((k, getattr(self, k),))
242 lst.append((k, getattr(self, k),))
243 return lst
243 return lst
244
244
245 def populate_obj(self, populate_dict):
245 def populate_obj(self, populate_dict):
246 """populate model with data from given populate_dict"""
246 """populate model with data from given populate_dict"""
247
247
248 for k in self._get_keys():
248 for k in self._get_keys():
249 if k in populate_dict:
249 if k in populate_dict:
250 setattr(self, k, populate_dict[k])
250 setattr(self, k, populate_dict[k])
251
251
252 @classmethod
252 @classmethod
253 def query(cls):
253 def query(cls):
254 return Session().query(cls)
254 return Session().query(cls)
255
255
256 @classmethod
256 @classmethod
257 def select(cls, custom_cls=None):
257 def select(cls, custom_cls=None):
258 """
258 """
259 stmt = cls.select().where(cls.user_id==1)
259 stmt = cls.select().where(cls.user_id==1)
260 # optionally
260 # optionally
261 stmt = cls.select(User.user_id).where(cls.user_id==1)
261 stmt = cls.select(User.user_id).where(cls.user_id==1)
262 result = cls.execute(stmt) | cls.scalars(stmt)
262 result = cls.execute(stmt) | cls.scalars(stmt)
263 """
263 """
264
264
265 if custom_cls:
265 if custom_cls:
266 stmt = select(custom_cls)
266 stmt = select(custom_cls)
267 else:
267 else:
268 stmt = select(cls)
268 stmt = select(cls)
269 return stmt
269 return stmt
270
270
271 @classmethod
271 @classmethod
272 def execute(cls, stmt):
272 def execute(cls, stmt):
273 return Session().execute(stmt)
273 return Session().execute(stmt)
274
274
275 @classmethod
275 @classmethod
276 def scalars(cls, stmt):
276 def scalars(cls, stmt):
277 return Session().scalars(stmt)
277 return Session().scalars(stmt)
278
278
279 @classmethod
279 @classmethod
280 def get(cls, id_):
280 def get(cls, id_):
281 if id_:
281 if id_:
282 return cls.query().get(id_)
282 return cls.query().get(id_)
283
283
284 @classmethod
284 @classmethod
285 def get_or_404(cls, id_):
285 def get_or_404(cls, id_):
286 from pyramid.httpexceptions import HTTPNotFound
286 from pyramid.httpexceptions import HTTPNotFound
287
287
288 try:
288 try:
289 id_ = int(id_)
289 id_ = int(id_)
290 except (TypeError, ValueError):
290 except (TypeError, ValueError):
291 raise HTTPNotFound()
291 raise HTTPNotFound()
292
292
293 res = cls.query().get(id_)
293 res = cls.query().get(id_)
294 if not res:
294 if not res:
295 raise HTTPNotFound()
295 raise HTTPNotFound()
296 return res
296 return res
297
297
298 @classmethod
298 @classmethod
299 def getAll(cls):
299 def getAll(cls):
300 # deprecated and left for backward compatibility
300 # deprecated and left for backward compatibility
301 return cls.get_all()
301 return cls.get_all()
302
302
303 @classmethod
303 @classmethod
304 def get_all(cls):
304 def get_all(cls):
305 return cls.query().all()
305 return cls.query().all()
306
306
307 @classmethod
307 @classmethod
308 def delete(cls, id_):
308 def delete(cls, id_):
309 obj = cls.query().get(id_)
309 obj = cls.query().get(id_)
310 Session().delete(obj)
310 Session().delete(obj)
311
311
312 @classmethod
312 @classmethod
313 def identity_cache(cls, session, attr_name, value):
313 def identity_cache(cls, session, attr_name, value):
314 exist_in_session = []
314 exist_in_session = []
315 for (item_cls, pkey), instance in session.identity_map.items():
315 for (item_cls, pkey), instance in session.identity_map.items():
316 if cls == item_cls and getattr(instance, attr_name) == value:
316 if cls == item_cls and getattr(instance, attr_name) == value:
317 exist_in_session.append(instance)
317 exist_in_session.append(instance)
318 if exist_in_session:
318 if exist_in_session:
319 if len(exist_in_session) == 1:
319 if len(exist_in_session) == 1:
320 return exist_in_session[0]
320 return exist_in_session[0]
321 log.exception(
321 log.exception(
322 'multiple objects with attr %s and '
322 'multiple objects with attr %s and '
323 'value %s found with same name: %r',
323 'value %s found with same name: %r',
324 attr_name, value, exist_in_session)
324 attr_name, value, exist_in_session)
325
325
326 @property
326 @property
327 def cls_name(self):
327 def cls_name(self):
328 return self.__class__.__name__
328 return self.__class__.__name__
329
329
330 def __repr__(self):
330 def __repr__(self):
331 return f'<DB:{self.cls_name}>'
331 return f'<DB:{self.cls_name}>'
332
332
333
333
334 class RhodeCodeSetting(Base, BaseModel):
334 class RhodeCodeSetting(Base, BaseModel):
335 __tablename__ = 'rhodecode_settings'
335 __tablename__ = 'rhodecode_settings'
336 __table_args__ = (
336 __table_args__ = (
337 UniqueConstraint('app_settings_name'),
337 UniqueConstraint('app_settings_name'),
338 base_table_args
338 base_table_args
339 )
339 )
340
340
341 SETTINGS_TYPES = {
341 SETTINGS_TYPES = {
342 'str': safe_str,
342 'str': safe_str,
343 'int': safe_int,
343 'int': safe_int,
344 'unicode': safe_str,
344 'unicode': safe_str,
345 'bool': str2bool,
345 'bool': str2bool,
346 'list': functools.partial(aslist, sep=',')
346 'list': functools.partial(aslist, sep=',')
347 }
347 }
348 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
348 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
349 GLOBAL_CONF_KEY = 'app_settings'
349 GLOBAL_CONF_KEY = 'app_settings'
350
350
351 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
351 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
352 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
352 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
353 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
353 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
354 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
354 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
355
355
356 def __init__(self, key='', val='', type='unicode'):
356 def __init__(self, key='', val='', type='unicode'):
357 self.app_settings_name = key
357 self.app_settings_name = key
358 self.app_settings_type = type
358 self.app_settings_type = type
359 self.app_settings_value = val
359 self.app_settings_value = val
360
360
361 @validates('_app_settings_value')
361 @validates('_app_settings_value')
362 def validate_settings_value(self, key, val):
362 def validate_settings_value(self, key, val):
363 assert type(val) == str
363 assert type(val) == str
364 return val
364 return val
365
365
366 @hybrid_property
366 @hybrid_property
367 def app_settings_value(self):
367 def app_settings_value(self):
368 v = self._app_settings_value
368 v = self._app_settings_value
369 _type = self.app_settings_type
369 _type = self.app_settings_type
370 if _type:
370 if _type:
371 _type = self.app_settings_type.split('.')[0]
371 _type = self.app_settings_type.split('.')[0]
372 # decode the encrypted value
372 # decode the encrypted value
373 if 'encrypted' in self.app_settings_type:
373 if 'encrypted' in self.app_settings_type:
374 cipher = EncryptedTextValue()
374 cipher = EncryptedTextValue()
375 v = safe_str(cipher.process_result_value(v, None))
375 v = safe_str(cipher.process_result_value(v, None))
376
376
377 converter = self.SETTINGS_TYPES.get(_type) or \
377 converter = self.SETTINGS_TYPES.get(_type) or \
378 self.SETTINGS_TYPES['unicode']
378 self.SETTINGS_TYPES['unicode']
379 return converter(v)
379 return converter(v)
380
380
381 @app_settings_value.setter
381 @app_settings_value.setter
382 def app_settings_value(self, val):
382 def app_settings_value(self, val):
383 """
383 """
384 Setter that will always make sure we use unicode in app_settings_value
384 Setter that will always make sure we use unicode in app_settings_value
385
385
386 :param val:
386 :param val:
387 """
387 """
388 val = safe_str(val)
388 val = safe_str(val)
389 # encode the encrypted value
389 # encode the encrypted value
390 if 'encrypted' in self.app_settings_type:
390 if 'encrypted' in self.app_settings_type:
391 cipher = EncryptedTextValue()
391 cipher = EncryptedTextValue()
392 val = safe_str(cipher.process_bind_param(val, None))
392 val = safe_str(cipher.process_bind_param(val, None))
393 self._app_settings_value = val
393 self._app_settings_value = val
394
394
395 @hybrid_property
395 @hybrid_property
396 def app_settings_type(self):
396 def app_settings_type(self):
397 return self._app_settings_type
397 return self._app_settings_type
398
398
399 @app_settings_type.setter
399 @app_settings_type.setter
400 def app_settings_type(self, val):
400 def app_settings_type(self, val):
401 if val.split('.')[0] not in self.SETTINGS_TYPES:
401 if val.split('.')[0] not in self.SETTINGS_TYPES:
402 raise Exception('type must be one of %s got %s'
402 raise Exception('type must be one of %s got %s'
403 % (self.SETTINGS_TYPES.keys(), val))
403 % (self.SETTINGS_TYPES.keys(), val))
404 self._app_settings_type = val
404 self._app_settings_type = val
405
405
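# Usage sketch (illustrative only; the setting keys and values are made-up examples):
# how the hybrid properties above convert and protect stored values.
#
#     s = RhodeCodeSetting('example_flag', 'True', 'bool')
#     s.app_settings_value        # -> True, converted via SETTINGS_TYPES['bool'] (str2bool)
#
#     secret = RhodeCodeSetting('example_secret', 'plain-text', 'unicode.encrypted')
#     # the setter stored the value through EncryptedTextValue.process_bind_param(),
#     # the getter decrypts it again and then applies the base 'unicode' converter
#     secret.app_settings_value   # -> 'plain-text'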
406 @classmethod
406 @classmethod
407 def get_by_prefix(cls, prefix):
407 def get_by_prefix(cls, prefix):
408 return RhodeCodeSetting.query()\
408 return RhodeCodeSetting.query()\
409 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
409 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
410 .all()
410 .all()
411
411
412 def __repr__(self):
412 def __repr__(self):
413 return "<%s('%s:%s[%s]')>" % (
413 return "<%s('%s:%s[%s]')>" % (
414 self.cls_name,
414 self.cls_name,
415 self.app_settings_name, self.app_settings_value,
415 self.app_settings_name, self.app_settings_value,
416 self.app_settings_type
416 self.app_settings_type
417 )
417 )
418
418
419
419
420 class RhodeCodeUi(Base, BaseModel):
420 class RhodeCodeUi(Base, BaseModel):
421 __tablename__ = 'rhodecode_ui'
421 __tablename__ = 'rhodecode_ui'
422 __table_args__ = (
422 __table_args__ = (
423 UniqueConstraint('ui_key'),
423 UniqueConstraint('ui_key'),
424 base_table_args
424 base_table_args
425 )
425 )
426 # Sync those values with vcsserver.config.hooks
426 # Sync those values with vcsserver.config.hooks
427
427
428 HOOK_REPO_SIZE = 'changegroup.repo_size'
428 HOOK_REPO_SIZE = 'changegroup.repo_size'
429 # HG
429 # HG
430 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
430 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
431 HOOK_PULL = 'outgoing.pull_logger'
431 HOOK_PULL = 'outgoing.pull_logger'
432 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
432 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
433 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
433 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
434 HOOK_PUSH = 'changegroup.push_logger'
434 HOOK_PUSH = 'changegroup.push_logger'
435 HOOK_PUSH_KEY = 'pushkey.key_push'
435 HOOK_PUSH_KEY = 'pushkey.key_push'
436
436
437 HOOKS_BUILTIN = [
437 HOOKS_BUILTIN = [
438 HOOK_PRE_PULL,
438 HOOK_PRE_PULL,
439 HOOK_PULL,
439 HOOK_PULL,
440 HOOK_PRE_PUSH,
440 HOOK_PRE_PUSH,
441 HOOK_PRETX_PUSH,
441 HOOK_PRETX_PUSH,
442 HOOK_PUSH,
442 HOOK_PUSH,
443 HOOK_PUSH_KEY,
443 HOOK_PUSH_KEY,
444 ]
444 ]
445
445
446 # TODO: johbo: Unify way how hooks are configured for git and hg,
446 # TODO: johbo: Unify way how hooks are configured for git and hg,
447 # git part is currently hardcoded.
447 # git part is currently hardcoded.
448
448
449 # SVN PATTERNS
449 # SVN PATTERNS
450 SVN_BRANCH_ID = 'vcs_svn_branch'
450 SVN_BRANCH_ID = 'vcs_svn_branch'
451 SVN_TAG_ID = 'vcs_svn_tag'
451 SVN_TAG_ID = 'vcs_svn_tag'
452
452
453 ui_id = Column(
453 ui_id = Column(
454 "ui_id", Integer(), nullable=False, unique=True, default=None,
454 "ui_id", Integer(), nullable=False, unique=True, default=None,
455 primary_key=True)
455 primary_key=True)
456 ui_section = Column(
456 ui_section = Column(
457 "ui_section", String(255), nullable=True, unique=None, default=None)
457 "ui_section", String(255), nullable=True, unique=None, default=None)
458 ui_key = Column(
458 ui_key = Column(
459 "ui_key", String(255), nullable=True, unique=None, default=None)
459 "ui_key", String(255), nullable=True, unique=None, default=None)
460 ui_value = Column(
460 ui_value = Column(
461 "ui_value", String(255), nullable=True, unique=None, default=None)
461 "ui_value", String(255), nullable=True, unique=None, default=None)
462 ui_active = Column(
462 ui_active = Column(
463 "ui_active", Boolean(), nullable=True, unique=None, default=True)
463 "ui_active", Boolean(), nullable=True, unique=None, default=True)
464
464
465 def __repr__(self):
465 def __repr__(self):
466 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
466 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
467 self.ui_key, self.ui_value)
467 self.ui_key, self.ui_value)
468
468
469
469
470 class RepoRhodeCodeSetting(Base, BaseModel):
470 class RepoRhodeCodeSetting(Base, BaseModel):
471 __tablename__ = 'repo_rhodecode_settings'
471 __tablename__ = 'repo_rhodecode_settings'
472 __table_args__ = (
472 __table_args__ = (
473 UniqueConstraint(
473 UniqueConstraint(
474 'app_settings_name', 'repository_id',
474 'app_settings_name', 'repository_id',
475 name='uq_repo_rhodecode_setting_name_repo_id'),
475 name='uq_repo_rhodecode_setting_name_repo_id'),
476 base_table_args
476 base_table_args
477 )
477 )
478
478
479 repository_id = Column(
479 repository_id = Column(
480 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
480 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
481 nullable=False)
481 nullable=False)
482 app_settings_id = Column(
482 app_settings_id = Column(
483 "app_settings_id", Integer(), nullable=False, unique=True,
483 "app_settings_id", Integer(), nullable=False, unique=True,
484 default=None, primary_key=True)
484 default=None, primary_key=True)
485 app_settings_name = Column(
485 app_settings_name = Column(
486 "app_settings_name", String(255), nullable=True, unique=None,
486 "app_settings_name", String(255), nullable=True, unique=None,
487 default=None)
487 default=None)
488 _app_settings_value = Column(
488 _app_settings_value = Column(
489 "app_settings_value", String(4096), nullable=True, unique=None,
489 "app_settings_value", String(4096), nullable=True, unique=None,
490 default=None)
490 default=None)
491 _app_settings_type = Column(
491 _app_settings_type = Column(
492 "app_settings_type", String(255), nullable=True, unique=None,
492 "app_settings_type", String(255), nullable=True, unique=None,
493 default=None)
493 default=None)
494
494
495 repository = relationship('Repository', viewonly=True)
495 repository = relationship('Repository', viewonly=True)
496
496
497 def __init__(self, repository_id, key='', val='', type='unicode'):
497 def __init__(self, repository_id, key='', val='', type='unicode'):
498 self.repository_id = repository_id
498 self.repository_id = repository_id
499 self.app_settings_name = key
499 self.app_settings_name = key
500 self.app_settings_type = type
500 self.app_settings_type = type
501 self.app_settings_value = val
501 self.app_settings_value = val
502
502
503 @validates('_app_settings_value')
503 @validates('_app_settings_value')
504 def validate_settings_value(self, key, val):
504 def validate_settings_value(self, key, val):
505 assert type(val) == str
505 assert type(val) == str
506 return val
506 return val
507
507
508 @hybrid_property
508 @hybrid_property
509 def app_settings_value(self):
509 def app_settings_value(self):
510 v = self._app_settings_value
510 v = self._app_settings_value
511 type_ = self.app_settings_type
511 type_ = self.app_settings_type
512 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
512 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
513 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
513 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
514 return converter(v)
514 return converter(v)
515
515
516 @app_settings_value.setter
516 @app_settings_value.setter
517 def app_settings_value(self, val):
517 def app_settings_value(self, val):
518 """
518 """
519 Setter that will always make sure we use unicode in app_settings_value
519 Setter that will always make sure we use unicode in app_settings_value
520
520
521 :param val:
521 :param val:
522 """
522 """
523 self._app_settings_value = safe_str(val)
523 self._app_settings_value = safe_str(val)
524
524
525 @hybrid_property
525 @hybrid_property
526 def app_settings_type(self):
526 def app_settings_type(self):
527 return self._app_settings_type
527 return self._app_settings_type
528
528
529 @app_settings_type.setter
529 @app_settings_type.setter
530 def app_settings_type(self, val):
530 def app_settings_type(self, val):
531 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
531 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
532 if val not in SETTINGS_TYPES:
532 if val not in SETTINGS_TYPES:
533 raise Exception('type must be one of %s got %s'
533 raise Exception('type must be one of %s got %s'
534 % (SETTINGS_TYPES.keys(), val))
534 % (SETTINGS_TYPES.keys(), val))
535 self._app_settings_type = val
535 self._app_settings_type = val
536
536
537 def __repr__(self):
537 def __repr__(self):
538 return "<%s('%s:%s:%s[%s]')>" % (
538 return "<%s('%s:%s:%s[%s]')>" % (
539 self.cls_name, self.repository.repo_name,
539 self.cls_name, self.repository.repo_name,
540 self.app_settings_name, self.app_settings_value,
540 self.app_settings_name, self.app_settings_value,
541 self.app_settings_type
541 self.app_settings_type
542 )
542 )
543
543
544
544
545 class RepoRhodeCodeUi(Base, BaseModel):
545 class RepoRhodeCodeUi(Base, BaseModel):
546 __tablename__ = 'repo_rhodecode_ui'
546 __tablename__ = 'repo_rhodecode_ui'
547 __table_args__ = (
547 __table_args__ = (
548 UniqueConstraint(
548 UniqueConstraint(
549 'repository_id', 'ui_section', 'ui_key',
549 'repository_id', 'ui_section', 'ui_key',
550 name='uq_repo_rhodecode_ui_repository_id_section_key'),
550 name='uq_repo_rhodecode_ui_repository_id_section_key'),
551 base_table_args
551 base_table_args
552 )
552 )
553
553
554 repository_id = Column(
554 repository_id = Column(
555 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
555 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
556 nullable=False)
556 nullable=False)
557 ui_id = Column(
557 ui_id = Column(
558 "ui_id", Integer(), nullable=False, unique=True, default=None,
558 "ui_id", Integer(), nullable=False, unique=True, default=None,
559 primary_key=True)
559 primary_key=True)
560 ui_section = Column(
560 ui_section = Column(
561 "ui_section", String(255), nullable=True, unique=None, default=None)
561 "ui_section", String(255), nullable=True, unique=None, default=None)
562 ui_key = Column(
562 ui_key = Column(
563 "ui_key", String(255), nullable=True, unique=None, default=None)
563 "ui_key", String(255), nullable=True, unique=None, default=None)
564 ui_value = Column(
564 ui_value = Column(
565 "ui_value", String(255), nullable=True, unique=None, default=None)
565 "ui_value", String(255), nullable=True, unique=None, default=None)
566 ui_active = Column(
566 ui_active = Column(
567 "ui_active", Boolean(), nullable=True, unique=None, default=True)
567 "ui_active", Boolean(), nullable=True, unique=None, default=True)
568
568
569 repository = relationship('Repository', viewonly=True)
569 repository = relationship('Repository', viewonly=True)
570
570
571 def __repr__(self):
571 def __repr__(self):
572 return '<%s[%s:%s]%s=>%s]>' % (
572 return '<%s[%s:%s]%s=>%s]>' % (
573 self.cls_name, self.repository.repo_name,
573 self.cls_name, self.repository.repo_name,
574 self.ui_section, self.ui_key, self.ui_value)
574 self.ui_section, self.ui_key, self.ui_value)
575
575
576
576
577 class User(Base, BaseModel):
577 class User(Base, BaseModel):
578 __tablename__ = 'users'
578 __tablename__ = 'users'
579 __table_args__ = (
579 __table_args__ = (
580 UniqueConstraint('username'), UniqueConstraint('email'),
580 UniqueConstraint('username'), UniqueConstraint('email'),
581 Index('u_username_idx', 'username'),
581 Index('u_username_idx', 'username'),
582 Index('u_email_idx', 'email'),
582 Index('u_email_idx', 'email'),
583 base_table_args
583 base_table_args
584 )
584 )
585
585
586 DEFAULT_USER = 'default'
586 DEFAULT_USER = 'default'
587 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
587 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
588 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
588 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
589
589
590 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
590 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
591 username = Column("username", String(255), nullable=True, unique=None, default=None)
591 username = Column("username", String(255), nullable=True, unique=None, default=None)
592 password = Column("password", String(255), nullable=True, unique=None, default=None)
592 password = Column("password", String(255), nullable=True, unique=None, default=None)
593 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
593 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
594 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
594 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
595 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
595 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
596 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
596 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
597 _email = Column("email", String(255), nullable=True, unique=None, default=None)
597 _email = Column("email", String(255), nullable=True, unique=None, default=None)
598 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
598 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
599 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
599 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
600 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
600 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
601
601
602 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
602 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
603 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
603 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
604 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
604 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
605 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
605 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
606 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
606 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
607 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
607 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
608
608
609 user_log = relationship('UserLog', back_populates='user')
609 user_log = relationship('UserLog', back_populates='user')
610 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
610 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
611
611
612 repositories = relationship('Repository', back_populates='user')
612 repositories = relationship('Repository', back_populates='user')
613 repository_groups = relationship('RepoGroup', back_populates='user')
613 repository_groups = relationship('RepoGroup', back_populates='user')
614 user_groups = relationship('UserGroup', back_populates='user')
614 user_groups = relationship('UserGroup', back_populates='user')
615
615
616 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
616 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
617 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
617 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
618
618
619 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
619 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
620 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
620 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
621 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
621 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
622
622
623 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
623 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
624
624
625 notifications = relationship('UserNotification', cascade='all', back_populates='user')
625 notifications = relationship('UserNotification', cascade='all', back_populates='user')
626 # notifications assigned to this user
626 # notifications assigned to this user
627 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
627 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
628 # comments created by this user
628 # comments created by this user
629 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
629 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
630 # user profile extra info
630 # user profile extra info
631 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
631 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
632 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
632 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
633 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
633 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
634 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
634 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
635
635
636 # gists
636 # gists
637 user_gists = relationship('Gist', cascade='all', back_populates='owner')
637 user_gists = relationship('Gist', cascade='all', back_populates='owner')
638 # user pull requests
638 # user pull requests
639 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
639 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
640
640
641 # external identities
641 # external identities
642 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
642 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
643 # review rules
643 # review rules
644 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
644 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
645
645
646 # artifacts owned
646 # artifacts owned
647 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
647 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
648
648
649 # no cascade, set NULL
649 # no cascade, set NULL
650 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
650 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
651
651
652 def __repr__(self):
652 def __repr__(self):
653 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
653 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
654
654
655 @hybrid_property
655 @hybrid_property
656 def email(self):
656 def email(self):
657 return self._email
657 return self._email
658
658
659 @email.setter
659 @email.setter
660 def email(self, val):
660 def email(self, val):
661 self._email = val.lower() if val else None
661 self._email = val.lower() if val else None
662
662
663 @hybrid_property
663 @hybrid_property
664 def first_name(self):
664 def first_name(self):
665 from rhodecode.lib import helpers as h
665 from rhodecode.lib import helpers as h
666 if self.name:
666 if self.name:
667 return h.escape(self.name)
667 return h.escape(self.name)
668 return self.name
668 return self.name
669
669
670 @hybrid_property
670 @hybrid_property
671 def last_name(self):
671 def last_name(self):
672 from rhodecode.lib import helpers as h
672 from rhodecode.lib import helpers as h
673 if self.lastname:
673 if self.lastname:
674 return h.escape(self.lastname)
674 return h.escape(self.lastname)
675 return self.lastname
675 return self.lastname
676
676
677 @hybrid_property
677 @hybrid_property
678 def api_key(self):
678 def api_key(self):
679 """
679 """
680 Fetch an auth-token with role ALL connected to this user, if one exists
680 Fetch an auth-token with role ALL connected to this user, if one exists
681 """
681 """
682 user_auth_token = UserApiKeys.query()\
682 user_auth_token = UserApiKeys.query()\
683 .filter(UserApiKeys.user_id == self.user_id)\
683 .filter(UserApiKeys.user_id == self.user_id)\
684 .filter(or_(UserApiKeys.expires == -1,
684 .filter(or_(UserApiKeys.expires == -1,
685 UserApiKeys.expires >= time.time()))\
685 UserApiKeys.expires >= time.time()))\
686 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
686 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
687 if user_auth_token:
687 if user_auth_token:
688 user_auth_token = user_auth_token.api_key
688 user_auth_token = user_auth_token.api_key
689
689
690 return user_auth_token
690 return user_auth_token
691
691
692 @api_key.setter
692 @api_key.setter
693 def api_key(self, val):
693 def api_key(self, val):
694 # don't allow setting the API key; this is deprecated for now
694 # don't allow setting the API key; this is deprecated for now
695 self._api_key = None
695 self._api_key = None
696
696
697 @property
697 @property
698 def reviewer_pull_requests(self):
698 def reviewer_pull_requests(self):
699 return PullRequestReviewers.query() \
699 return PullRequestReviewers.query() \
700 .options(joinedload(PullRequestReviewers.pull_request)) \
700 .options(joinedload(PullRequestReviewers.pull_request)) \
701 .filter(PullRequestReviewers.user_id == self.user_id) \
701 .filter(PullRequestReviewers.user_id == self.user_id) \
702 .all()
702 .all()
703
703
704 @property
704 @property
705 def firstname(self):
705 def firstname(self):
706 # alias for future
706 # alias for future
707 return self.name
707 return self.name
708
708
709 @property
709 @property
710 def emails(self):
710 def emails(self):
711 other = UserEmailMap.query()\
711 other = UserEmailMap.query()\
712 .filter(UserEmailMap.user == self) \
712 .filter(UserEmailMap.user == self) \
713 .order_by(UserEmailMap.email_id.asc()) \
713 .order_by(UserEmailMap.email_id.asc()) \
714 .all()
714 .all()
715 return [self.email] + [x.email for x in other]
715 return [self.email] + [x.email for x in other]
716
716
717 def emails_cached(self):
717 def emails_cached(self):
718 emails = []
718 emails = []
719 if self.user_id != self.get_default_user_id():
719 if self.user_id != self.get_default_user_id():
720 emails = UserEmailMap.query()\
720 emails = UserEmailMap.query()\
721 .filter(UserEmailMap.user == self) \
721 .filter(UserEmailMap.user == self) \
722 .order_by(UserEmailMap.email_id.asc())
722 .order_by(UserEmailMap.email_id.asc())
723
723
724 emails = emails.options(
724 emails = emails.options(
725 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
725 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
726 )
726 )
727
727
728 return [self.email] + [x.email for x in emails]
728 return [self.email] + [x.email for x in emails]
729
729
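# Note (an assumption about the caching helper, for illustration): FromCache("sql_cache_short", key)
# attaches a cache region to the query, so repeated lookups under the same key can be
# served without hitting the database.
#
#     user.emails_cached()   # cached under f"get_user_{user.user_id}_emails"
#     user.emails            # plain, uncached variant defined above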
730 @property
730 @property
731 def auth_tokens(self):
731 def auth_tokens(self):
732 auth_tokens = self.get_auth_tokens()
732 auth_tokens = self.get_auth_tokens()
733 return [x.api_key for x in auth_tokens]
733 return [x.api_key for x in auth_tokens]
734
734
735 def get_auth_tokens(self):
735 def get_auth_tokens(self):
736 return UserApiKeys.query()\
736 return UserApiKeys.query()\
737 .filter(UserApiKeys.user == self)\
737 .filter(UserApiKeys.user == self)\
738 .order_by(UserApiKeys.user_api_key_id.asc())\
738 .order_by(UserApiKeys.user_api_key_id.asc())\
739 .all()
739 .all()
740
740
741 @LazyProperty
741 @LazyProperty
742 def feed_token(self):
742 def feed_token(self):
743 return self.get_feed_token()
743 return self.get_feed_token()
744
744
745 def get_feed_token(self, cache=True):
745 def get_feed_token(self, cache=True):
746 feed_tokens = UserApiKeys.query()\
746 feed_tokens = UserApiKeys.query()\
747 .filter(UserApiKeys.user == self)\
747 .filter(UserApiKeys.user == self)\
748 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
748 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
749 if cache:
749 if cache:
750 feed_tokens = feed_tokens.options(
750 feed_tokens = feed_tokens.options(
751 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
751 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
752
752
753 feed_tokens = feed_tokens.all()
753 feed_tokens = feed_tokens.all()
754 if feed_tokens:
754 if feed_tokens:
755 return feed_tokens[0].api_key
755 return feed_tokens[0].api_key
756 return 'NO_FEED_TOKEN_AVAILABLE'
756 return 'NO_FEED_TOKEN_AVAILABLE'
757
757
758 @LazyProperty
758 @LazyProperty
759 def artifact_token(self):
759 def artifact_token(self):
760 return self.get_artifact_token()
760 return self.get_artifact_token()
761
761
762 def get_artifact_token(self, cache=True):
762 def get_artifact_token(self, cache=True):
763 artifacts_tokens = UserApiKeys.query()\
763 artifacts_tokens = UserApiKeys.query()\
764 .filter(UserApiKeys.user == self) \
764 .filter(UserApiKeys.user == self) \
765 .filter(or_(UserApiKeys.expires == -1,
765 .filter(or_(UserApiKeys.expires == -1,
766 UserApiKeys.expires >= time.time())) \
766 UserApiKeys.expires >= time.time())) \
767 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
767 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
768
768
769 if cache:
769 if cache:
770 artifacts_tokens = artifacts_tokens.options(
770 artifacts_tokens = artifacts_tokens.options(
771 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
771 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
772
772
773 artifacts_tokens = artifacts_tokens.all()
773 artifacts_tokens = artifacts_tokens.all()
774 if artifacts_tokens:
774 if artifacts_tokens:
775 return artifacts_tokens[0].api_key
775 return artifacts_tokens[0].api_key
776 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
776 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
777
777
778 def get_or_create_artifact_token(self):
778 def get_or_create_artifact_token(self):
779 artifacts_tokens = UserApiKeys.query()\
779 artifacts_tokens = UserApiKeys.query()\
780 .filter(UserApiKeys.user == self) \
780 .filter(UserApiKeys.user == self) \
781 .filter(or_(UserApiKeys.expires == -1,
781 .filter(or_(UserApiKeys.expires == -1,
782 UserApiKeys.expires >= time.time())) \
782 UserApiKeys.expires >= time.time())) \
783 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
783 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
784
784
785 artifacts_tokens = artifacts_tokens.all()
785 artifacts_tokens = artifacts_tokens.all()
786 if artifacts_tokens:
786 if artifacts_tokens:
787 return artifacts_tokens[0].api_key
787 return artifacts_tokens[0].api_key
788 else:
788 else:
789 from rhodecode.model.auth_token import AuthTokenModel
789 from rhodecode.model.auth_token import AuthTokenModel
790 artifact_token = AuthTokenModel().create(
790 artifact_token = AuthTokenModel().create(
791 self, 'auto-generated-artifact-token',
791 self, 'auto-generated-artifact-token',
792 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
792 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
793 Session.commit()
793 Session.commit()
794 return artifact_token.api_key
794 return artifact_token.api_key
795
795
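# Usage sketch (illustrative only): the token queries above treat expires == -1 as
# "never expires", and the auto-generated token is created with lifetime=-1, which
# presumably maps to that marker.
#
#     token = user.get_or_create_artifact_token()   # created once, reused afterwards
#     user.get_artifact_token()                     # returns the same (possibly cached) value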
796 @classmethod
796 @classmethod
797 def get(cls, user_id, cache=False):
797 def get(cls, user_id, cache=False):
798 if not user_id:
798 if not user_id:
799 return
799 return
800
800
801 user = cls.query()
801 user = cls.query()
802 if cache:
802 if cache:
803 user = user.options(
803 user = user.options(
804 FromCache("sql_cache_short", f"get_users_{user_id}"))
804 FromCache("sql_cache_short", f"get_users_{user_id}"))
805 return user.get(user_id)
805 return user.get(user_id)
806
806
807 @classmethod
807 @classmethod
808 def extra_valid_auth_tokens(cls, user, role=None):
808 def extra_valid_auth_tokens(cls, user, role=None):
809 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
809 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
810 .filter(or_(UserApiKeys.expires == -1,
810 .filter(or_(UserApiKeys.expires == -1,
811 UserApiKeys.expires >= time.time()))
811 UserApiKeys.expires >= time.time()))
812 if role:
812 if role:
813 tokens = tokens.filter(or_(UserApiKeys.role == role,
813 tokens = tokens.filter(or_(UserApiKeys.role == role,
814 UserApiKeys.role == UserApiKeys.ROLE_ALL))
814 UserApiKeys.role == UserApiKeys.ROLE_ALL))
815 return tokens.all()
815 return tokens.all()
816
816
817 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
817 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
818 from rhodecode.lib import auth
818 from rhodecode.lib import auth
819
819
820 log.debug('Trying to authenticate user: %s via auth-token, '
820 log.debug('Trying to authenticate user: %s via auth-token, '
821 'and roles: %s', self, roles)
821 'and roles: %s', self, roles)
822
822
823 if not auth_token:
823 if not auth_token:
824 return False
824 return False
825
825
826 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
826 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
827 tokens_q = UserApiKeys.query()\
827 tokens_q = UserApiKeys.query()\
828 .filter(UserApiKeys.user_id == self.user_id)\
828 .filter(UserApiKeys.user_id == self.user_id)\
829 .filter(or_(UserApiKeys.expires == -1,
829 .filter(or_(UserApiKeys.expires == -1,
830 UserApiKeys.expires >= time.time()))
830 UserApiKeys.expires >= time.time()))
831
831
832 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
832 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
833
833
834 crypto_backend = auth.crypto_backend()
834 crypto_backend = auth.crypto_backend()
835 enc_token_map = {}
835 enc_token_map = {}
836 plain_token_map = {}
836 plain_token_map = {}
837 for token in tokens_q:
837 for token in tokens_q:
838 if token.api_key.startswith(crypto_backend.ENC_PREF):
838 if token.api_key.startswith(crypto_backend.ENC_PREF):
839 enc_token_map[token.api_key] = token
839 enc_token_map[token.api_key] = token
840 else:
840 else:
841 plain_token_map[token.api_key] = token
841 plain_token_map[token.api_key] = token
842 log.debug(
842 log.debug(
843 'Found %s plain and %s encrypted tokens to check for authentication for this user',
843 'Found %s plain and %s encrypted tokens to check for authentication for this user',
844 len(plain_token_map), len(enc_token_map))
844 len(plain_token_map), len(enc_token_map))
845
845
846 # plain token match comes first
846 # plain token match comes first
847 match = plain_token_map.get(auth_token)
847 match = plain_token_map.get(auth_token)
848
848
849 # check encrypted tokens now
849 # check encrypted tokens now
850 if not match:
850 if not match:
851 for token_hash, token in enc_token_map.items():
851 for token_hash, token in enc_token_map.items():
852 # NOTE(marcink): this is expensive to calculate, but most secure
852 # NOTE(marcink): this is expensive to calculate, but most secure
853 if crypto_backend.hash_check(auth_token, token_hash):
853 if crypto_backend.hash_check(auth_token, token_hash):
854 match = token
854 match = token
855 break
855 break
856
856
857 if match:
857 if match:
858 log.debug('Found matching token %s', match)
858 log.debug('Found matching token %s', match)
859 if match.repo_id:
859 if match.repo_id:
860 log.debug('Found scope, checking for scope match of token %s', match)
860 log.debug('Found scope, checking for scope match of token %s', match)
861 if match.repo_id == scope_repo_id:
861 if match.repo_id == scope_repo_id:
862 return True
862 return True
863 else:
863 else:
864 log.debug(
864 log.debug(
865 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
865 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
866 'and calling scope is: %s, skipping further checks',
866 'and calling scope is: %s, skipping further checks',
867 match.repo, scope_repo_id)
867 match.repo, scope_repo_id)
868 return False
868 return False
869 else:
869 else:
870 return True
870 return True
871
871
872 return False
872 return False
873
873
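# Usage sketch (illustrative only; `repo` stands for some Repository instance): plain
# tokens are matched by direct lookup, encrypted ones via crypto_backend().hash_check(),
# and a repo-scoped token only authenticates when scope_repo_id matches its repo_id.
#
#     ok = user.authenticate_by_token(
#         auth_token, roles=[UserApiKeys.ROLE_VCS], scope_repo_id=repo.repo_id)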
874 @property
874 @property
875 def ip_addresses(self):
875 def ip_addresses(self):
876 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
876 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
877 return [x.ip_addr for x in ret]
877 return [x.ip_addr for x in ret]
878
878
879 @property
879 @property
880 def username_and_name(self):
880 def username_and_name(self):
881 return f'{self.username} ({self.first_name} {self.last_name})'
881 return f'{self.username} ({self.first_name} {self.last_name})'
882
882
883 @property
883 @property
884 def username_or_name_or_email(self):
884 def username_or_name_or_email(self):
885 full_name = self.full_name if self.full_name != ' ' else None
885 full_name = self.full_name if self.full_name != ' ' else None
886 return self.username or full_name or self.email
886 return self.username or full_name or self.email
887
887
888 @property
888 @property
889 def full_name(self):
889 def full_name(self):
890 return f'{self.first_name} {self.last_name}'
890 return f'{self.first_name} {self.last_name}'
891
891
892 @property
892 @property
893 def full_name_or_username(self):
893 def full_name_or_username(self):
894 return (f'{self.first_name} {self.last_name}'
894 return (f'{self.first_name} {self.last_name}'
895 if (self.first_name and self.last_name) else self.username)
895 if (self.first_name and self.last_name) else self.username)
896
896
897 @property
897 @property
898 def full_contact(self):
898 def full_contact(self):
899 return f'{self.first_name} {self.last_name} <{self.email}>'
899 return f'{self.first_name} {self.last_name} <{self.email}>'
900
900
901 @property
901 @property
902 def short_contact(self):
902 def short_contact(self):
903 return f'{self.first_name} {self.last_name}'
903 return f'{self.first_name} {self.last_name}'
904
904
905 @property
905 @property
906 def is_admin(self):
906 def is_admin(self):
907 return self.admin
907 return self.admin
908
908
909 @property
909 @property
910 def language(self):
910 def language(self):
911 return self.user_data.get('language')
911 return self.user_data.get('language')
912
912
913 def AuthUser(self, **kwargs):
913 def AuthUser(self, **kwargs):
914 """
914 """
915 Returns an instance of AuthUser for this user
915 Returns an instance of AuthUser for this user
916 """
916 """
917 from rhodecode.lib.auth import AuthUser
917 from rhodecode.lib.auth import AuthUser
918 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
918 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
919
919
920 @hybrid_property
920 @hybrid_property
921 def user_data(self):
921 def user_data(self):
922 if not self._user_data:
922 if not self._user_data:
923 return {}
923 return {}
924
924
925 try:
925 try:
926 return json.loads(self._user_data) or {}
926 return json.loads(self._user_data) or {}
927 except TypeError:
927 except TypeError:
928 return {}
928 return {}
929
929
930 @user_data.setter
930 @user_data.setter
931 def user_data(self, val):
931 def user_data(self, val):
932 if not isinstance(val, dict):
932 if not isinstance(val, dict):
933 raise Exception('user_data must be dict, got %s' % type(val))
933 raise Exception('user_data must be dict, got %s' % type(val))
934 try:
934 try:
935 self._user_data = safe_bytes(json.dumps(val))
935 self._user_data = safe_bytes(json.dumps(val))
936 except Exception:
936 except Exception:
937 log.error(traceback.format_exc())
937 log.error(traceback.format_exc())
938
938
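# Usage sketch (illustrative only): user_data round-trips a dict through JSON into the
# binary `user_data` column.
#
#     user.user_data = {'language': 'en'}
#     user.user_data.get('language')   # -> 'en'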
939 @classmethod
939 @classmethod
940 def get_by_username(cls, username, case_insensitive=False,
940 def get_by_username(cls, username, case_insensitive=False,
941 cache=False):
941 cache=False):
942
942
943 if case_insensitive:
943 if case_insensitive:
944 q = cls.select().where(
944 q = cls.select().where(
945 func.lower(cls.username) == func.lower(username))
945 func.lower(cls.username) == func.lower(username))
946 else:
946 else:
947 q = cls.select().where(cls.username == username)
947 q = cls.select().where(cls.username == username)
948
948
949 if cache:
949 if cache:
950 hash_key = _hash_key(username)
950 hash_key = _hash_key(username)
951 q = q.options(
951 q = q.options(
952 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
952 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
953
953
954 return cls.execute(q).scalar_one_or_none()
954 return cls.execute(q).scalar_one_or_none()
955
955
956 @classmethod
956 @classmethod
957 def get_by_auth_token(cls, auth_token, cache=False):
957 def get_by_auth_token(cls, auth_token, cache=False):
958
958
959 q = cls.select(User)\
959 q = cls.select(User)\
960 .join(UserApiKeys)\
960 .join(UserApiKeys)\
961 .where(UserApiKeys.api_key == auth_token)\
961 .where(UserApiKeys.api_key == auth_token)\
962 .where(or_(UserApiKeys.expires == -1,
962 .where(or_(UserApiKeys.expires == -1,
963 UserApiKeys.expires >= time.time()))
963 UserApiKeys.expires >= time.time()))
964
964
965 if cache:
965 if cache:
966 q = q.options(
966 q = q.options(
967 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
967 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
968
968
969 matched_user = cls.execute(q).scalar_one_or_none()
969 matched_user = cls.execute(q).scalar_one_or_none()
970
970
971 return matched_user
971 return matched_user
972
972
973 @classmethod
973 @classmethod
974 def get_by_email(cls, email, case_insensitive=False, cache=False):
974 def get_by_email(cls, email, case_insensitive=False, cache=False):
975
975
976 if case_insensitive:
976 if case_insensitive:
977 q = cls.select().where(func.lower(cls.email) == func.lower(email))
977 q = cls.select().where(func.lower(cls.email) == func.lower(email))
978 else:
978 else:
979 q = cls.select().where(cls.email == email)
979 q = cls.select().where(cls.email == email)
980
980
981 if cache:
981 if cache:
982 email_key = _hash_key(email)
982 email_key = _hash_key(email)
983 q = q.options(
983 q = q.options(
984 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
984 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
985
985
986 ret = cls.execute(q).scalar_one_or_none()
986 ret = cls.execute(q).scalar_one_or_none()
987
987
988 if ret is None:
988 if ret is None:
989 q = cls.select(UserEmailMap)
989 q = cls.select(UserEmailMap)
990 # try fetching in alternate email map
990 # try fetching in alternate email map
991 if case_insensitive:
991 if case_insensitive:
992 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
992 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
993 else:
993 else:
994 q = q.where(UserEmailMap.email == email)
994 q = q.where(UserEmailMap.email == email)
995 q = q.options(joinedload(UserEmailMap.user))
995 q = q.options(joinedload(UserEmailMap.user))
996 if cache:
996 if cache:
997 q = q.options(
997 q = q.options(
998 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
998 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
999
999
1000 result = cls.execute(q).scalar_one_or_none()
1000 result = cls.execute(q).scalar_one_or_none()
1001 ret = getattr(result, 'user', None)
1001 ret = getattr(result, 'user', None)
1002
1002
1003 return ret
1003 return ret
1004
1004
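# Usage sketch (illustrative only; the address is a made-up example): the lookup checks
# the primary User.email column first and then falls back to the UserEmailMap table of
# extra addresses, so either kind of address resolves to the owning user.
#
#     User.get_by_email('Someone@Example.COM', case_insensitive=True)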
1005 @classmethod
1005 @classmethod
1006 def get_from_cs_author(cls, author):
1006 def get_from_cs_author(cls, author):
1007 """
1007 """
1008 Tries to get User objects out of commit author string
1008 Tries to get User objects out of commit author string
1009
1009
1010 :param author:
1010 :param author:
1011 """
1011 """
1012 from rhodecode.lib.helpers import email, author_name
1012 from rhodecode.lib.helpers import email, author_name
1013 # if a valid email is found in the passed author string, see if it belongs to a user in the system
1013 # if a valid email is found in the passed author string, see if it belongs to a user in the system
1014 _email = email(author)
1014 _email = email(author)
1015 if _email:
1015 if _email:
1016 user = cls.get_by_email(_email, case_insensitive=True)
1016 user = cls.get_by_email(_email, case_insensitive=True)
1017 if user:
1017 if user:
1018 return user
1018 return user
1019 # Maybe we can match by username?
1019 # Maybe we can match by username?
1020 _author = author_name(author)
1020 _author = author_name(author)
1021 user = cls.get_by_username(_author, case_insensitive=True)
1021 user = cls.get_by_username(_author, case_insensitive=True)
1022 if user:
1022 if user:
1023 return user
1023 return user
1024
1024
1025 def update_userdata(self, **kwargs):
1025 def update_userdata(self, **kwargs):
1026 usr = self
1026 usr = self
1027 old = usr.user_data
1027 old = usr.user_data
1028 old.update(**kwargs)
1028 old.update(**kwargs)
1029 usr.user_data = old
1029 usr.user_data = old
1030 Session().add(usr)
1030 Session().add(usr)
1031 log.debug('updated userdata with %s', kwargs)
1031 log.debug('updated userdata with %s', kwargs)
1032
1032
1033 def update_lastlogin(self):
1033 def update_lastlogin(self):
1034 """Update user lastlogin"""
1034 """Update user lastlogin"""
1035 self.last_login = datetime.datetime.now()
1035 self.last_login = datetime.datetime.now()
1036 Session().add(self)
1036 Session().add(self)
1037 log.debug('updated user %s lastlogin', self.username)
1037 log.debug('updated user %s lastlogin', self.username)
1038
1038
1039 def update_password(self, new_password):
1039 def update_password(self, new_password):
1040 from rhodecode.lib.auth import get_crypt_password
1040 from rhodecode.lib.auth import get_crypt_password
1041
1041
1042 self.password = get_crypt_password(new_password)
1042 self.password = get_crypt_password(new_password)
1043 Session().add(self)
1043 Session().add(self)
1044
1044
1045 @classmethod
1045 @classmethod
1046 def get_first_super_admin(cls):
1046 def get_first_super_admin(cls):
1047 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1047 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1048 user = cls.scalars(stmt).first()
1048 user = cls.scalars(stmt).first()
1049
1049
1050 if user is None:
1050 if user is None:
1051 raise Exception('FATAL: Missing administrative account!')
1051 raise Exception('FATAL: Missing administrative account!')
1052 return user
1052 return user
1053
1053
1054 @classmethod
1054 @classmethod
1055 def get_all_super_admins(cls, only_active=False):
1055 def get_all_super_admins(cls, only_active=False):
1056 """
1056 """
1057 Returns all admin accounts sorted by username
1057 Returns all admin accounts sorted by username
1058 """
1058 """
1059 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1059 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1060 if only_active:
1060 if only_active:
1061 qry = qry.filter(User.active == true())
1061 qry = qry.filter(User.active == true())
1062 return qry.all()
1062 return qry.all()
1063
1063
1064 @classmethod
1064 @classmethod
1065 def get_all_user_ids(cls, only_active=True):
1065 def get_all_user_ids(cls, only_active=True):
1066 """
1066 """
1067 Returns all user IDs
1067 Returns all user IDs
1068 """
1068 """
1069 qry = Session().query(User.user_id)
1069 qry = Session().query(User.user_id)
1070
1070
1071 if only_active:
1071 if only_active:
1072 qry = qry.filter(User.active == true())
1072 qry = qry.filter(User.active == true())
1073 return [x.user_id for x in qry]
1073 return [x.user_id for x in qry]
1074
1074
1075 @classmethod
1075 @classmethod
1076 def get_default_user(cls, cache=False, refresh=False):
1076 def get_default_user(cls, cache=False, refresh=False):
1077 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1077 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1078 if user is None:
1078 if user is None:
1079 raise Exception('FATAL: Missing default account!')
1079 raise Exception('FATAL: Missing default account!')
1080 if refresh:
1080 if refresh:
1081 # The default user might be based on outdated state which
1081 # The default user might be based on outdated state which
1082 # has been loaded from the cache.
1082 # has been loaded from the cache.
1083 # A call to refresh() ensures that the
1083 # A call to refresh() ensures that the
1084 # latest state from the database is used.
1084 # latest state from the database is used.
1085 Session().refresh(user)
1085 Session().refresh(user)
1086
1086
1087 return user
1087 return user
1088
1088
1089 @classmethod
1089 @classmethod
1090 def get_default_user_id(cls):
1090 def get_default_user_id(cls):
1091 import rhodecode
1091 import rhodecode
1092 return rhodecode.CONFIG['default_user_id']
1092 return rhodecode.CONFIG['default_user_id']
1093
1093
1094 def _get_default_perms(self, user, suffix=''):
1094 def _get_default_perms(self, user, suffix=''):
1095 from rhodecode.model.permission import PermissionModel
1095 from rhodecode.model.permission import PermissionModel
1096 return PermissionModel().get_default_perms(user.user_perms, suffix)
1096 return PermissionModel().get_default_perms(user.user_perms, suffix)
1097
1097
1098 def get_default_perms(self, suffix=''):
1098 def get_default_perms(self, suffix=''):
1099 return self._get_default_perms(self, suffix)
1099 return self._get_default_perms(self, suffix)
1100
1100
1101 def get_api_data(self, include_secrets=False, details='full'):
1101 def get_api_data(self, include_secrets=False, details='full'):
1102 """
1102 """
1103 Common function for generating user related data for API
1103 Common function for generating user related data for API
1104
1104
1105 :param include_secrets: By default secrets in the API data will be replaced
1105 :param include_secrets: By default secrets in the API data will be replaced
1106 by a placeholder value to prevent exposing this data by accident. If this
1106 by a placeholder value to prevent exposing this data by accident. If this
1107 data should be exposed, set this flag to ``True``.
1107 data should be exposed, set this flag to ``True``.
1108
1108
1109 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of
1109 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of
1110 the available user information, including user_id, name and emails.
1110 the available user information, including user_id, name and emails.
1111 """
1111 """
1112 user = self
1112 user = self
1113 user_data = self.user_data
1113 user_data = self.user_data
1114 data = {
1114 data = {
1115 'user_id': user.user_id,
1115 'user_id': user.user_id,
1116 'username': user.username,
1116 'username': user.username,
1117 'firstname': user.name,
1117 'firstname': user.name,
1118 'lastname': user.lastname,
1118 'lastname': user.lastname,
1119 'description': user.description,
1119 'description': user.description,
1120 'email': user.email,
1120 'email': user.email,
1121 'emails': user.emails,
1121 'emails': user.emails,
1122 }
1122 }
1123 if details == 'basic':
1123 if details == 'basic':
1124 return data
1124 return data
1125
1125
1126 auth_token_length = 40
1126 auth_token_length = 40
1127 auth_token_replacement = '*' * auth_token_length
1127 auth_token_replacement = '*' * auth_token_length
1128
1128
1129 extras = {
1129 extras = {
1130 'auth_tokens': [auth_token_replacement],
1130 'auth_tokens': [auth_token_replacement],
1131 'active': user.active,
1131 'active': user.active,
1132 'admin': user.admin,
1132 'admin': user.admin,
1133 'extern_type': user.extern_type,
1133 'extern_type': user.extern_type,
1134 'extern_name': user.extern_name,
1134 'extern_name': user.extern_name,
1135 'last_login': user.last_login,
1135 'last_login': user.last_login,
1136 'last_activity': user.last_activity,
1136 'last_activity': user.last_activity,
1137 'ip_addresses': user.ip_addresses,
1137 'ip_addresses': user.ip_addresses,
1138 'language': user_data.get('language')
1138 'language': user_data.get('language')
1139 }
1139 }
1140 data.update(extras)
1140 data.update(extras)
1141
1141
1142 if include_secrets:
1142 if include_secrets:
1143 data['auth_tokens'] = user.auth_tokens
1143 data['auth_tokens'] = user.auth_tokens
1144 return data
1144 return data
1145
1145
1146 def __json__(self):
1146 def __json__(self):
1147 data = {
1147 data = {
1148 'full_name': self.full_name,
1148 'full_name': self.full_name,
1149 'full_name_or_username': self.full_name_or_username,
1149 'full_name_or_username': self.full_name_or_username,
1150 'short_contact': self.short_contact,
1150 'short_contact': self.short_contact,
1151 'full_contact': self.full_contact,
1151 'full_contact': self.full_contact,
1152 }
1152 }
1153 data.update(self.get_api_data())
1153 data.update(self.get_api_data())
1154 return data
1154 return data
1155
1155
1156
1156
1157 class UserApiKeys(Base, BaseModel):
1157 class UserApiKeys(Base, BaseModel):
1158 __tablename__ = 'user_api_keys'
1158 __tablename__ = 'user_api_keys'
1159 __table_args__ = (
1159 __table_args__ = (
1160 Index('uak_api_key_idx', 'api_key'),
1160 Index('uak_api_key_idx', 'api_key'),
1161 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1161 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1162 base_table_args
1162 base_table_args
1163 )
1163 )
1164
1164
1165 # ApiKey role
1165 # ApiKey role
1166 ROLE_ALL = 'token_role_all'
1166 ROLE_ALL = 'token_role_all'
1167 ROLE_VCS = 'token_role_vcs'
1167 ROLE_VCS = 'token_role_vcs'
1168 ROLE_API = 'token_role_api'
1168 ROLE_API = 'token_role_api'
1169 ROLE_HTTP = 'token_role_http'
1169 ROLE_HTTP = 'token_role_http'
1170 ROLE_FEED = 'token_role_feed'
1170 ROLE_FEED = 'token_role_feed'
1171 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1171 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1172 # The last role is not included in ROLES as it is only
1172 # The last role is not included in ROLES as it is only
1173 # used for one action and cannot be created by users
1173 # used for one action and cannot be created by users
1174 ROLE_PASSWORD_RESET = 'token_password_reset'
1174 ROLE_PASSWORD_RESET = 'token_password_reset'
1175
1175
1176 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1176 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1177
1177
1178 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1178 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1179 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1179 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1180 api_key = Column("api_key", String(255), nullable=False, unique=True)
1180 api_key = Column("api_key", String(255), nullable=False, unique=True)
1181 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1181 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1182 expires = Column('expires', Float(53), nullable=False)
1182 expires = Column('expires', Float(53), nullable=False)
1183 role = Column('role', String(255), nullable=True)
1183 role = Column('role', String(255), nullable=True)
1184 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1184 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1185
1185
1186 # scope columns
1186 # scope columns
1187 repo_id = Column(
1187 repo_id = Column(
1188 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1188 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1189 nullable=True, unique=None, default=None)
1189 nullable=True, unique=None, default=None)
1190 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1190 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1191
1191
1192 repo_group_id = Column(
1192 repo_group_id = Column(
1193 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1193 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1194 nullable=True, unique=None, default=None)
1194 nullable=True, unique=None, default=None)
1195 repo_group = relationship('RepoGroup', lazy='joined')
1195 repo_group = relationship('RepoGroup', lazy='joined')
1196
1196
1197 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1197 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1198
1198
1199 def __repr__(self):
1199 def __repr__(self):
1200 return f"<{self.cls_name}('{self.role}')>"
1200 return f"<{self.cls_name}('{self.role}')>"
1201
1201
1202 def __json__(self):
1202 def __json__(self):
1203 data = {
1203 data = {
1204 'auth_token': self.api_key,
1204 'auth_token': self.api_key,
1205 'role': self.role,
1205 'role': self.role,
1206 'scope': self.scope_humanized,
1206 'scope': self.scope_humanized,
1207 'expired': self.expired
1207 'expired': self.expired
1208 }
1208 }
1209 return data
1209 return data
1210
1210
1211 def get_api_data(self, include_secrets=False):
1211 def get_api_data(self, include_secrets=False):
1212 data = self.__json__()
1212 data = self.__json__()
1213 if include_secrets:
1213 if include_secrets:
1214 return data
1214 return data
1215 else:
1215 else:
1216 data['auth_token'] = self.token_obfuscated
1216 data['auth_token'] = self.token_obfuscated
1217 return data
1217 return data
1218
1218
1219 @hybrid_property
1219 @hybrid_property
1220 def description_safe(self):
1220 def description_safe(self):
1221 from rhodecode.lib import helpers as h
1221 from rhodecode.lib import helpers as h
1222 return h.escape(self.description)
1222 return h.escape(self.description)
1223
1223
1224 @property
1224 @property
1225 def expired(self):
1225 def expired(self):
1226 if self.expires == -1:
1226 if self.expires == -1:
1227 return False
1227 return False
1228 return time.time() > self.expires
1228 return time.time() > self.expires
1229
1229
1230 @classmethod
1230 @classmethod
1231 def _get_role_name(cls, role):
1231 def _get_role_name(cls, role):
1232 return {
1232 return {
1233 cls.ROLE_ALL: _('all'),
1233 cls.ROLE_ALL: _('all'),
1234 cls.ROLE_HTTP: _('http/web interface'),
1234 cls.ROLE_HTTP: _('http/web interface'),
1235 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1235 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1236 cls.ROLE_API: _('api calls'),
1236 cls.ROLE_API: _('api calls'),
1237 cls.ROLE_FEED: _('feed access'),
1237 cls.ROLE_FEED: _('feed access'),
1238 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1238 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1239 }.get(role, role)
1239 }.get(role, role)
1240
1240
1241 @classmethod
1241 @classmethod
1242 def _get_role_description(cls, role):
1242 def _get_role_description(cls, role):
1243 return {
1243 return {
1244 cls.ROLE_ALL: _('Token for all actions.'),
1244 cls.ROLE_ALL: _('Token for all actions.'),
1245 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1245 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1246 'login using `api_access_controllers_whitelist` functionality.'),
1246 'login using `api_access_controllers_whitelist` functionality.'),
1247 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1247 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1248 'Requires auth_token authentication plugin to be active. <br/>'
1248 'Requires auth_token authentication plugin to be active. <br/>'
1249 'Such a token should then be used instead of a password to '
1249 'Such a token should then be used instead of a password to '
1250 'interact with a repository, and additionally can be '
1250 'interact with a repository, and additionally can be '
1251 'limited to single repository using repo scope.'),
1251 'limited to single repository using repo scope.'),
1252 cls.ROLE_API: _('Token limited to api calls.'),
1252 cls.ROLE_API: _('Token limited to api calls.'),
1253 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1253 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1254 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1254 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1255 }.get(role, role)
1255 }.get(role, role)
1256
1256
1257 @property
1257 @property
1258 def role_humanized(self):
1258 def role_humanized(self):
1259 return self._get_role_name(self.role)
1259 return self._get_role_name(self.role)
1260
1260
1261 def _get_scope(self):
1261 def _get_scope(self):
1262 if self.repo:
1262 if self.repo:
1263 return 'Repository: {}'.format(self.repo.repo_name)
1263 return 'Repository: {}'.format(self.repo.repo_name)
1264 if self.repo_group:
1264 if self.repo_group:
1265 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1265 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1266 return 'Global'
1266 return 'Global'
1267
1267
1268 @property
1268 @property
1269 def scope_humanized(self):
1269 def scope_humanized(self):
1270 return self._get_scope()
1270 return self._get_scope()
1271
1271
1272 @property
1272 @property
1273 def token_obfuscated(self):
1273 def token_obfuscated(self):
1274 if self.api_key:
1274 if self.api_key:
1275 return self.api_key[:4] + "****"
1275 return self.api_key[:4] + "****"
1276
1276
1277
1277
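# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# The block below shows how a repository-scoped auth token could be assembled
# with the UserApiKeys model above. It assumes a configured Session() plus
# existing `user` and `repo` objects; secrets.token_hex() merely stands in for
# whatever token generator RhodeCode actually uses.
import time
import secrets

token = UserApiKeys()
token.user_id = user.user_id              # token owner
token.api_key = secrets.token_hex(20)     # placeholder token value
token.role = UserApiKeys.ROLE_VCS         # usable over git/hg/svn only
token.repo_id = repo.repo_id              # limit the token to one repository
token.expires = time.time() + 3600        # one hour; -1 would mean "never expires"
token.description = 'clone token for CI'
Session().add(token)
Session().commit()

assert token.expired is False             # compares time.time() against `expires`
assert token.token_obfuscated.endswith('****')
# ------------------------------------------------------------------------------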
1278 class UserEmailMap(Base, BaseModel):
1278 class UserEmailMap(Base, BaseModel):
1279 __tablename__ = 'user_email_map'
1279 __tablename__ = 'user_email_map'
1280 __table_args__ = (
1280 __table_args__ = (
1281 Index('uem_email_idx', 'email'),
1281 Index('uem_email_idx', 'email'),
1282 Index('uem_user_id_idx', 'user_id'),
1282 Index('uem_user_id_idx', 'user_id'),
1283 UniqueConstraint('email'),
1283 UniqueConstraint('email'),
1284 base_table_args
1284 base_table_args
1285 )
1285 )
1286
1286
1287 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1287 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1288 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1288 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1289 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1289 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1290 user = relationship('User', lazy='joined', back_populates='user_emails')
1290 user = relationship('User', lazy='joined', back_populates='user_emails')
1291
1291
1292 @validates('_email')
1292 @validates('_email')
1293 def validate_email(self, key, email):
1293 def validate_email(self, key, email):
1294 # check if this email is not main one
1294 # check if this email is not main one
1295 main_email = Session().query(User).filter(User.email == email).scalar()
1295 main_email = Session().query(User).filter(User.email == email).scalar()
1296 if main_email is not None:
1296 if main_email is not None:
1297 raise AttributeError('email %s is already present in the user table' % email)
1297 raise AttributeError('email %s is already present in the user table' % email)
1298 return email
1298 return email
1299
1299
1300 @hybrid_property
1300 @hybrid_property
1301 def email(self):
1301 def email(self):
1302 return self._email
1302 return self._email
1303
1303
1304 @email.setter
1304 @email.setter
1305 def email(self, val):
1305 def email(self, val):
1306 self._email = val.lower() if val else None
1306 self._email = val.lower() if val else None
1307
1307
1308
1308
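# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Extra e-mail addresses are lower-cased by the hybrid setter above, and the
# validator rejects addresses that already serve as a user's primary e-mail.
# Assumes a configured Session() and an existing `user` object.
extra_email = UserEmailMap()
extra_email.user = user
extra_email.email = 'Alt.Address@Example.COM'    # stored as lower-case
assert extra_email.email == 'alt.address@example.com'
Session().add(extra_email)
Session().commit()
# ------------------------------------------------------------------------------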
1309 class UserIpMap(Base, BaseModel):
1309 class UserIpMap(Base, BaseModel):
1310 __tablename__ = 'user_ip_map'
1310 __tablename__ = 'user_ip_map'
1311 __table_args__ = (
1311 __table_args__ = (
1312 UniqueConstraint('user_id', 'ip_addr'),
1312 UniqueConstraint('user_id', 'ip_addr'),
1313 base_table_args
1313 base_table_args
1314 )
1314 )
1315
1315
1316 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1316 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1317 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1317 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1318 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1318 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1319 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1319 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1320 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1320 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1321 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1321 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1322
1322
1323 @hybrid_property
1323 @hybrid_property
1324 def description_safe(self):
1324 def description_safe(self):
1325 from rhodecode.lib import helpers as h
1325 from rhodecode.lib import helpers as h
1326 return h.escape(self.description)
1326 return h.escape(self.description)
1327
1327
1328 @classmethod
1328 @classmethod
1329 def _get_ip_range(cls, ip_addr):
1329 def _get_ip_range(cls, ip_addr):
1330 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1330 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1331 return [str(net.network_address), str(net.broadcast_address)]
1331 return [str(net.network_address), str(net.broadcast_address)]
1332
1332
1333 def __json__(self):
1333 def __json__(self):
1334 return {
1334 return {
1335 'ip_addr': self.ip_addr,
1335 'ip_addr': self.ip_addr,
1336 'ip_range': self._get_ip_range(self.ip_addr),
1336 'ip_range': self._get_ip_range(self.ip_addr),
1337 }
1337 }
1338
1338
1339 def __repr__(self):
1339 def __repr__(self):
1340 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1340 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1341
1341
1342
1342
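# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# A standalone version of the _get_ip_range() logic above; it only needs the
# stdlib ipaddress module, so it can be run on its own.
import ipaddress

def ip_range(ip_addr):
    net = ipaddress.ip_network(ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

assert ip_range('192.168.1.0/24') == ['192.168.1.0', '192.168.1.255']
assert ip_range('10.0.0.5') == ['10.0.0.5', '10.0.0.5']    # single host
# ------------------------------------------------------------------------------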
1343 class UserSshKeys(Base, BaseModel):
1343 class UserSshKeys(Base, BaseModel):
1344 __tablename__ = 'user_ssh_keys'
1344 __tablename__ = 'user_ssh_keys'
1345 __table_args__ = (
1345 __table_args__ = (
1346 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1346 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1347
1347
1348 UniqueConstraint('ssh_key_fingerprint'),
1348 UniqueConstraint('ssh_key_fingerprint'),
1349
1349
1350 base_table_args
1350 base_table_args
1351 )
1351 )
1352
1352
1353 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1353 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1354 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1354 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1355 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1355 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1356
1356
1357 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1357 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1358
1358
1359 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1359 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1360 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1360 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1361 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1361 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1362
1362
1363 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1363 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1364
1364
1365 def __json__(self):
1365 def __json__(self):
1366 data = {
1366 data = {
1367 'ssh_fingerprint': self.ssh_key_fingerprint,
1367 'ssh_fingerprint': self.ssh_key_fingerprint,
1368 'description': self.description,
1368 'description': self.description,
1369 'created_on': self.created_on
1369 'created_on': self.created_on
1370 }
1370 }
1371 return data
1371 return data
1372
1372
1373 def get_api_data(self):
1373 def get_api_data(self):
1374 data = self.__json__()
1374 data = self.__json__()
1375 return data
1375 return data
1376
1376
1377
1377
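# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# One common way to derive a value for the ssh_key_fingerprint column is the
# hex MD5 digest of the base64-decoded key body; whether this matches the
# fingerprint format RhodeCode itself computes is an assumption.
import base64
import hashlib

def md5_fingerprint(ssh_key_data):
    # 'ssh-rsa AAAAB3... comment' -> hash the decoded base64 body
    key_body = ssh_key_data.split()[1]
    digest = hashlib.md5(base64.b64decode(key_body)).hexdigest()
    return ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2))
# ------------------------------------------------------------------------------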
1378 class UserLog(Base, BaseModel):
1378 class UserLog(Base, BaseModel):
1379 __tablename__ = 'user_logs'
1379 __tablename__ = 'user_logs'
1380 __table_args__ = (
1380 __table_args__ = (
1381 base_table_args,
1381 base_table_args,
1382 )
1382 )
1383
1383
1384 VERSION_1 = 'v1'
1384 VERSION_1 = 'v1'
1385 VERSION_2 = 'v2'
1385 VERSION_2 = 'v2'
1386 VERSIONS = [VERSION_1, VERSION_2]
1386 VERSIONS = [VERSION_1, VERSION_2]
1387
1387
1388 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1388 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1389 user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1389 user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1390 username = Column("username", String(255), nullable=True, unique=None, default=None)
1390 username = Column("username", String(255), nullable=True, unique=None, default=None)
1391 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1391 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1392 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1392 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1393 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1393 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1394 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1394 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1395 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1395 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1396
1396
1397 version = Column("version", String(255), nullable=True, default=VERSION_1)
1397 version = Column("version", String(255), nullable=True, default=VERSION_1)
1398 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1398 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1399 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1399 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1400 user = relationship('User', cascade='', back_populates='user_log')
1400 user = relationship('User', cascade='', back_populates='user_log')
1401 repository = relationship('Repository', cascade='', back_populates='logs')
1401 repository = relationship('Repository', cascade='', back_populates='logs')
1402
1402
1403 def __repr__(self):
1403 def __repr__(self):
1404 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1404 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1405
1405
1406 def __json__(self):
1406 def __json__(self):
1407 return {
1407 return {
1408 'user_id': self.user_id,
1408 'user_id': self.user_id,
1409 'username': self.username,
1409 'username': self.username,
1410 'repository_id': self.repository_id,
1410 'repository_id': self.repository_id,
1411 'repository_name': self.repository_name,
1411 'repository_name': self.repository_name,
1412 'user_ip': self.user_ip,
1412 'user_ip': self.user_ip,
1413 'action_date': self.action_date,
1413 'action_date': self.action_date,
1414 'action': self.action,
1414 'action': self.action,
1415 }
1415 }
1416
1416
1417 @hybrid_property
1417 @hybrid_property
1418 def entry_id(self):
1418 def entry_id(self):
1419 return self.user_log_id
1419 return self.user_log_id
1420
1420
1421 @property
1421 @property
1422 def action_as_day(self):
1422 def action_as_day(self):
1423 return datetime.date(*self.action_date.timetuple()[:3])
1423 return datetime.date(*self.action_date.timetuple()[:3])
1424
1424
1425
1425
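# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# A standalone check of the action_as_day conversion above: the audit-log
# timestamp is truncated to a plain date.
import datetime

action_date = datetime.datetime(2023, 5, 17, 13, 45, 9)
assert datetime.date(*action_date.timetuple()[:3]) == datetime.date(2023, 5, 17)
# ------------------------------------------------------------------------------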
1426 class UserGroup(Base, BaseModel):
1426 class UserGroup(Base, BaseModel):
1427 __tablename__ = 'users_groups'
1427 __tablename__ = 'users_groups'
1428 __table_args__ = (
1428 __table_args__ = (
1429 base_table_args,
1429 base_table_args,
1430 )
1430 )
1431
1431
1432 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1432 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1433 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1433 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1434 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1434 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1435 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1435 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1436 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1436 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1437 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1437 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1438 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1438 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1439 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1439 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1440
1440
1441 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1441 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1442 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1442 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1443 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1443 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1444 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1444 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1445 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1445 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1446
1446
1447 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1447 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1448
1448
1449 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1449 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1450 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1450 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1451
1451
1452 @classmethod
1452 @classmethod
1453 def _load_group_data(cls, column):
1453 def _load_group_data(cls, column):
1454 if not column:
1454 if not column:
1455 return {}
1455 return {}
1456
1456
1457 try:
1457 try:
1458 return json.loads(column) or {}
1458 return json.loads(column) or {}
1459 except TypeError:
1459 except TypeError:
1460 return {}
1460 return {}
1461
1461
1462 @hybrid_property
1462 @hybrid_property
1463 def description_safe(self):
1463 def description_safe(self):
1464 from rhodecode.lib import helpers as h
1464 from rhodecode.lib import helpers as h
1465 return h.escape(self.user_group_description)
1465 return h.escape(self.user_group_description)
1466
1466
1467 @hybrid_property
1467 @hybrid_property
1468 def group_data(self):
1468 def group_data(self):
1469 return self._load_group_data(self._group_data)
1469 return self._load_group_data(self._group_data)
1470
1470
1471 @group_data.expression
1471 @group_data.expression
1472 def group_data(self, **kwargs):
1472 def group_data(self, **kwargs):
1473 return self._group_data
1473 return self._group_data
1474
1474
1475 @group_data.setter
1475 @group_data.setter
1476 def group_data(self, val):
1476 def group_data(self, val):
1477 try:
1477 try:
1478 self._group_data = json.dumps(val)
1478 self._group_data = json.dumps(val)
1479 except Exception:
1479 except Exception:
1480 log.error(traceback.format_exc())
1480 log.error(traceback.format_exc())
1481
1481
1482 @classmethod
1482 @classmethod
1483 def _load_sync(cls, group_data):
1483 def _load_sync(cls, group_data):
1484 if group_data:
1484 if group_data:
1485 return group_data.get('extern_type')
1485 return group_data.get('extern_type')
1486
1486
1487 @property
1487 @property
1488 def sync(self):
1488 def sync(self):
1489 return self._load_sync(self.group_data)
1489 return self._load_sync(self.group_data)
1490
1490
1491 def __repr__(self):
1491 def __repr__(self):
1492 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1492 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1493
1493
1494 @classmethod
1494 @classmethod
1495 def get_by_group_name(cls, group_name, cache=False,
1495 def get_by_group_name(cls, group_name, cache=False,
1496 case_insensitive=False):
1496 case_insensitive=False):
1497 if case_insensitive:
1497 if case_insensitive:
1498 q = cls.query().filter(func.lower(cls.users_group_name) ==
1498 q = cls.query().filter(func.lower(cls.users_group_name) ==
1499 func.lower(group_name))
1499 func.lower(group_name))
1500
1500
1501 else:
1501 else:
1502 q = cls.query().filter(cls.users_group_name == group_name)
1502 q = cls.query().filter(cls.users_group_name == group_name)
1503 if cache:
1503 if cache:
1504 name_key = _hash_key(group_name)
1504 name_key = _hash_key(group_name)
1505 q = q.options(
1505 q = q.options(
1506 FromCache("sql_cache_short", f"get_group_{name_key}"))
1506 FromCache("sql_cache_short", f"get_group_{name_key}"))
1507 return q.scalar()
1507 return q.scalar()
1508
1508
1509 @classmethod
1509 @classmethod
1510 def get(cls, user_group_id, cache=False):
1510 def get(cls, user_group_id, cache=False):
1511 if not user_group_id:
1511 if not user_group_id:
1512 return
1512 return
1513
1513
1514 user_group = cls.query()
1514 user_group = cls.query()
1515 if cache:
1515 if cache:
1516 user_group = user_group.options(
1516 user_group = user_group.options(
1517 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1517 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1518 return user_group.get(user_group_id)
1518 return user_group.get(user_group_id)
1519
1519
1520 def permissions(self, with_admins=True, with_owner=True,
1520 def permissions(self, with_admins=True, with_owner=True,
1521 expand_from_user_groups=False):
1521 expand_from_user_groups=False):
1522 """
1522 """
1523 Permissions for user groups
1523 Permissions for user groups
1524 """
1524 """
1525 _admin_perm = 'usergroup.admin'
1525 _admin_perm = 'usergroup.admin'
1526
1526
1527 owner_row = []
1527 owner_row = []
1528 if with_owner:
1528 if with_owner:
1529 usr = AttributeDict(self.user.get_dict())
1529 usr = AttributeDict(self.user.get_dict())
1530 usr.owner_row = True
1530 usr.owner_row = True
1531 usr.permission = _admin_perm
1531 usr.permission = _admin_perm
1532 owner_row.append(usr)
1532 owner_row.append(usr)
1533
1533
1534 super_admin_ids = []
1534 super_admin_ids = []
1535 super_admin_rows = []
1535 super_admin_rows = []
1536 if with_admins:
1536 if with_admins:
1537 for usr in User.get_all_super_admins():
1537 for usr in User.get_all_super_admins():
1538 super_admin_ids.append(usr.user_id)
1538 super_admin_ids.append(usr.user_id)
1539 # if this admin is also owner, don't double the record
1539 # if this admin is also owner, don't double the record
1540 if owner_row and usr.user_id == owner_row[0].user_id:
1540 if owner_row and usr.user_id == owner_row[0].user_id:
1541 owner_row[0].admin_row = True
1541 owner_row[0].admin_row = True
1542 else:
1542 else:
1543 usr = AttributeDict(usr.get_dict())
1543 usr = AttributeDict(usr.get_dict())
1544 usr.admin_row = True
1544 usr.admin_row = True
1545 usr.permission = _admin_perm
1545 usr.permission = _admin_perm
1546 super_admin_rows.append(usr)
1546 super_admin_rows.append(usr)
1547
1547
1548 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1548 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1549 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1549 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1550 joinedload(UserUserGroupToPerm.user),
1550 joinedload(UserUserGroupToPerm.user),
1551 joinedload(UserUserGroupToPerm.permission),)
1551 joinedload(UserUserGroupToPerm.permission),)
1552
1552
1553 # get owners, admins and their permissions. We re-write the sqlalchemy
1553 # get owners, admins and their permissions. We re-write the sqlalchemy
1554 # objects into plain AttributeDict copies, because the sqlalchemy session
1554 # objects into plain AttributeDict copies, because the sqlalchemy session
1555 # keeps a global reference and changing one object would propagate to all
1555 # keeps a global reference and changing one object would propagate to all
1556 # others. Without the copy, marking an admin who is also the owner with
1556 # others. Without the copy, marking an admin who is also the owner with
1557 # admin_row would change both records
1557 # admin_row would change both records
1558 perm_rows = []
1558 perm_rows = []
1559 for _usr in q.all():
1559 for _usr in q.all():
1560 usr = AttributeDict(_usr.user.get_dict())
1560 usr = AttributeDict(_usr.user.get_dict())
1561 # if this user is also owner/admin, mark as duplicate record
1561 # if this user is also owner/admin, mark as duplicate record
1562 if (owner_row and usr.user_id == owner_row[0].user_id) or usr.user_id in super_admin_ids:
1562 if (owner_row and usr.user_id == owner_row[0].user_id) or usr.user_id in super_admin_ids:
1563 usr.duplicate_perm = True
1563 usr.duplicate_perm = True
1564 usr.permission = _usr.permission.permission_name
1564 usr.permission = _usr.permission.permission_name
1565 perm_rows.append(usr)
1565 perm_rows.append(usr)
1566
1566
1567 # sort the perm rows so the 'default' user comes first, then by
1567 # sort the perm rows so the 'default' user comes first, then by
1568 # admin, write, read, none permission, sorted alphabetically within
1568 # admin, write, read, none permission, sorted alphabetically within
1569 # each group
1569 # each group
1570 perm_rows = sorted(perm_rows, key=display_user_sort)
1570 perm_rows = sorted(perm_rows, key=display_user_sort)
1571
1571
1572 user_groups_rows = []
1572 user_groups_rows = []
1573 if expand_from_user_groups:
1573 if expand_from_user_groups:
1574 for ug in self.permission_user_groups(with_members=True):
1574 for ug in self.permission_user_groups(with_members=True):
1575 for user_data in ug.members:
1575 for user_data in ug.members:
1576 user_groups_rows.append(user_data)
1576 user_groups_rows.append(user_data)
1577
1577
1578 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1578 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1579
1579
1580 def permission_user_groups(self, with_members=False):
1580 def permission_user_groups(self, with_members=False):
1581 q = UserGroupUserGroupToPerm.query()\
1581 q = UserGroupUserGroupToPerm.query()\
1582 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1582 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1583 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1583 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1584 joinedload(UserGroupUserGroupToPerm.target_user_group),
1584 joinedload(UserGroupUserGroupToPerm.target_user_group),
1585 joinedload(UserGroupUserGroupToPerm.permission),)
1585 joinedload(UserGroupUserGroupToPerm.permission),)
1586
1586
1587 perm_rows = []
1587 perm_rows = []
1588 for _user_group in q.all():
1588 for _user_group in q.all():
1589 entry = AttributeDict(_user_group.user_group.get_dict())
1589 entry = AttributeDict(_user_group.user_group.get_dict())
1590 entry.permission = _user_group.permission.permission_name
1590 entry.permission = _user_group.permission.permission_name
1591 if with_members:
1591 if with_members:
1592 entry.members = [x.user.get_dict()
1592 entry.members = [x.user.get_dict()
1593 for x in _user_group.user_group.members]
1593 for x in _user_group.user_group.members]
1594 perm_rows.append(entry)
1594 perm_rows.append(entry)
1595
1595
1596 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1596 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1597 return perm_rows
1597 return perm_rows
1598
1598
1599 def _get_default_perms(self, user_group, suffix=''):
1599 def _get_default_perms(self, user_group, suffix=''):
1600 from rhodecode.model.permission import PermissionModel
1600 from rhodecode.model.permission import PermissionModel
1601 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1601 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1602
1602
1603 def get_default_perms(self, suffix=''):
1603 def get_default_perms(self, suffix=''):
1604 return self._get_default_perms(self, suffix)
1604 return self._get_default_perms(self, suffix)
1605
1605
1606 def get_api_data(self, with_group_members=True, include_secrets=False):
1606 def get_api_data(self, with_group_members=True, include_secrets=False):
1607 """
1607 """
1608 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1608 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1609 basically forwarded.
1609 basically forwarded.
1610
1610
1611 """
1611 """
1612 user_group = self
1612 user_group = self
1613 data = {
1613 data = {
1614 'users_group_id': user_group.users_group_id,
1614 'users_group_id': user_group.users_group_id,
1615 'group_name': user_group.users_group_name,
1615 'group_name': user_group.users_group_name,
1616 'group_description': user_group.user_group_description,
1616 'group_description': user_group.user_group_description,
1617 'active': user_group.users_group_active,
1617 'active': user_group.users_group_active,
1618 'owner': user_group.user.username,
1618 'owner': user_group.user.username,
1619 'sync': user_group.sync,
1619 'sync': user_group.sync,
1620 'owner_email': user_group.user.email,
1620 'owner_email': user_group.user.email,
1621 }
1621 }
1622
1622
1623 if with_group_members:
1623 if with_group_members:
1624 users = []
1624 users = []
1625 for user in user_group.members:
1625 for user in user_group.members:
1626 user = user.user
1626 user = user.user
1627 users.append(user.get_api_data(include_secrets=include_secrets))
1627 users.append(user.get_api_data(include_secrets=include_secrets))
1628 data['users'] = users
1628 data['users'] = users
1629
1629
1630 return data
1630 return data
1631
1631
1632
1632
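# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Example usage of the UserGroup helpers above, assuming a configured Session()
# and existing data; the 'devs' group name is made up.
group = UserGroup.get_by_group_name('devs', cache=True, case_insensitive=True)
if group:
    # owner + super-admins + explicit permission rows, already sorted
    for row in group.permissions(with_admins=True, with_owner=True):
        print(row.username, row.permission)
# ------------------------------------------------------------------------------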
1633 class UserGroupMember(Base, BaseModel):
1633 class UserGroupMember(Base, BaseModel):
1634 __tablename__ = 'users_groups_members'
1634 __tablename__ = 'users_groups_members'
1635 __table_args__ = (
1635 __table_args__ = (
1636 base_table_args,
1636 base_table_args,
1637 )
1637 )
1638
1638
1639 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1639 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1640 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1640 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1641 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1641 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1642
1642
1643 user = relationship('User', lazy='joined', back_populates='group_member')
1643 user = relationship('User', lazy='joined', back_populates='group_member')
1644 users_group = relationship('UserGroup', back_populates='members')
1644 users_group = relationship('UserGroup', back_populates='members')
1645
1645
1646 def __init__(self, gr_id='', u_id=''):
1646 def __init__(self, gr_id='', u_id=''):
1647 self.users_group_id = gr_id
1647 self.users_group_id = gr_id
1648 self.user_id = u_id
1648 self.user_id = u_id
1649
1649
1650
1650
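# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Adding a user to a group is just a membership row; assumes a configured
# Session() and existing `group` and `user` objects.
membership = UserGroupMember(gr_id=group.users_group_id, u_id=user.user_id)
Session().add(membership)
Session().commit()
# ------------------------------------------------------------------------------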
1651 class RepositoryField(Base, BaseModel):
1651 class RepositoryField(Base, BaseModel):
1652 __tablename__ = 'repositories_fields'
1652 __tablename__ = 'repositories_fields'
1653 __table_args__ = (
1653 __table_args__ = (
1654 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1654 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1655 base_table_args,
1655 base_table_args,
1656 )
1656 )
1657
1657
1658 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1658 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1659
1659
1660 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1660 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1661 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1661 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1662 field_key = Column("field_key", String(250))
1662 field_key = Column("field_key", String(250))
1663 field_label = Column("field_label", String(1024), nullable=False)
1663 field_label = Column("field_label", String(1024), nullable=False)
1664 field_value = Column("field_value", String(10000), nullable=False)
1664 field_value = Column("field_value", String(10000), nullable=False)
1665 field_desc = Column("field_desc", String(1024), nullable=False)
1665 field_desc = Column("field_desc", String(1024), nullable=False)
1666 field_type = Column("field_type", String(255), nullable=False, unique=None)
1666 field_type = Column("field_type", String(255), nullable=False, unique=None)
1667 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1667 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1668
1668
1669 repository = relationship('Repository', back_populates='extra_fields')
1669 repository = relationship('Repository', back_populates='extra_fields')
1670
1670
1671 @property
1671 @property
1672 def field_key_prefixed(self):
1672 def field_key_prefixed(self):
1673 return '%s%s' % (self.PREFIX, self.field_key)
1673 return '%s%s' % (self.PREFIX, self.field_key)
1674
1674
1675 @classmethod
1675 @classmethod
1676 def un_prefix_key(cls, key):
1676 def un_prefix_key(cls, key):
1677 if key.startswith(cls.PREFIX):
1677 if key.startswith(cls.PREFIX):
1678 return key[len(cls.PREFIX):]
1678 return key[len(cls.PREFIX):]
1679 return key
1679 return key
1680
1680
1681 @classmethod
1681 @classmethod
1682 def get_by_key_name(cls, key, repo):
1682 def get_by_key_name(cls, key, repo):
1683 row = cls.query()\
1683 row = cls.query()\
1684 .filter(cls.repository == repo)\
1684 .filter(cls.repository == repo)\
1685 .filter(cls.field_key == key).scalar()
1685 .filter(cls.field_key == key).scalar()
1686 return row
1686 return row
1687
1687
1688
1688
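# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Standalone illustration of the 'ex_' form-prefix handling above.
PREFIX = 'ex_'

def un_prefix_key(key):
    return key[len(PREFIX):] if key.startswith(PREFIX) else key

assert un_prefix_key('ex_tracker_url') == 'tracker_url'
assert un_prefix_key('tracker_url') == 'tracker_url'
# ------------------------------------------------------------------------------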
1689 class Repository(Base, BaseModel):
1689 class Repository(Base, BaseModel):
1690 __tablename__ = 'repositories'
1690 __tablename__ = 'repositories'
1691 __table_args__ = (
1691 __table_args__ = (
1692 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1692 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1693 base_table_args,
1693 base_table_args,
1694 )
1694 )
1695 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1695 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1696 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1696 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1697 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1697 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1698
1698
1699 STATE_CREATED = 'repo_state_created'
1699 STATE_CREATED = 'repo_state_created'
1700 STATE_PENDING = 'repo_state_pending'
1700 STATE_PENDING = 'repo_state_pending'
1701 STATE_ERROR = 'repo_state_error'
1701 STATE_ERROR = 'repo_state_error'
1702
1702
1703 LOCK_AUTOMATIC = 'lock_auto'
1703 LOCK_AUTOMATIC = 'lock_auto'
1704 LOCK_API = 'lock_api'
1704 LOCK_API = 'lock_api'
1705 LOCK_WEB = 'lock_web'
1705 LOCK_WEB = 'lock_web'
1706 LOCK_PULL = 'lock_pull'
1706 LOCK_PULL = 'lock_pull'
1707
1707
1708 NAME_SEP = URL_SEP
1708 NAME_SEP = URL_SEP
1709
1709
1710 repo_id = Column(
1710 repo_id = Column(
1711 "repo_id", Integer(), nullable=False, unique=True, default=None,
1711 "repo_id", Integer(), nullable=False, unique=True, default=None,
1712 primary_key=True)
1712 primary_key=True)
1713 _repo_name = Column(
1713 _repo_name = Column(
1714 "repo_name", Text(), nullable=False, default=None)
1714 "repo_name", Text(), nullable=False, default=None)
1715 repo_name_hash = Column(
1715 repo_name_hash = Column(
1716 "repo_name_hash", String(255), nullable=False, unique=True)
1716 "repo_name_hash", String(255), nullable=False, unique=True)
1717 repo_state = Column("repo_state", String(255), nullable=True)
1717 repo_state = Column("repo_state", String(255), nullable=True)
1718
1718
1719 clone_uri = Column(
1719 clone_uri = Column(
1720 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1720 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1721 default=None)
1721 default=None)
1722 push_uri = Column(
1722 push_uri = Column(
1723 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1723 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1724 default=None)
1724 default=None)
1725 repo_type = Column(
1725 repo_type = Column(
1726 "repo_type", String(255), nullable=False, unique=False, default=None)
1726 "repo_type", String(255), nullable=False, unique=False, default=None)
1727 user_id = Column(
1727 user_id = Column(
1728 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1728 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1729 unique=False, default=None)
1729 unique=False, default=None)
1730 private = Column(
1730 private = Column(
1731 "private", Boolean(), nullable=True, unique=None, default=None)
1731 "private", Boolean(), nullable=True, unique=None, default=None)
1732 archived = Column(
1732 archived = Column(
1733 "archived", Boolean(), nullable=True, unique=None, default=None)
1733 "archived", Boolean(), nullable=True, unique=None, default=None)
1734 enable_statistics = Column(
1734 enable_statistics = Column(
1735 "statistics", Boolean(), nullable=True, unique=None, default=True)
1735 "statistics", Boolean(), nullable=True, unique=None, default=True)
1736 enable_downloads = Column(
1736 enable_downloads = Column(
1737 "downloads", Boolean(), nullable=True, unique=None, default=True)
1737 "downloads", Boolean(), nullable=True, unique=None, default=True)
1738 description = Column(
1738 description = Column(
1739 "description", String(10000), nullable=True, unique=None, default=None)
1739 "description", String(10000), nullable=True, unique=None, default=None)
1740 created_on = Column(
1740 created_on = Column(
1741 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1741 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1742 default=datetime.datetime.now)
1742 default=datetime.datetime.now)
1743 updated_on = Column(
1743 updated_on = Column(
1744 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1744 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1745 default=datetime.datetime.now)
1745 default=datetime.datetime.now)
1746 _landing_revision = Column(
1746 _landing_revision = Column(
1747 "landing_revision", String(255), nullable=False, unique=False,
1747 "landing_revision", String(255), nullable=False, unique=False,
1748 default=None)
1748 default=None)
1749 enable_locking = Column(
1749 enable_locking = Column(
1750 "enable_locking", Boolean(), nullable=False, unique=None,
1750 "enable_locking", Boolean(), nullable=False, unique=None,
1751 default=False)
1751 default=False)
1752 _locked = Column(
1752 _locked = Column(
1753 "locked", String(255), nullable=True, unique=False, default=None)
1753 "locked", String(255), nullable=True, unique=False, default=None)
1754 _changeset_cache = Column(
1754 _changeset_cache = Column(
1755 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1755 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1756
1756
1757 fork_id = Column(
1757 fork_id = Column(
1758 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1758 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1759 nullable=True, unique=False, default=None)
1759 nullable=True, unique=False, default=None)
1760 group_id = Column(
1760 group_id = Column(
1761 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1761 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1762 unique=False, default=None)
1762 unique=False, default=None)
1763
1763
1764 user = relationship('User', lazy='joined', back_populates='repositories')
1764 user = relationship('User', lazy='joined', back_populates='repositories')
1765 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1765 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1766 group = relationship('RepoGroup', lazy='joined')
1766 group = relationship('RepoGroup', lazy='joined')
1767 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1767 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1768 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1768 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1769 stats = relationship('Statistics', cascade='all', uselist=False)
1769 stats = relationship('Statistics', cascade='all', uselist=False)
1770
1770
1771 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1771 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1772 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1772 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1773
1773
1774 logs = relationship('UserLog', back_populates='repository')
1774 logs = relationship('UserLog', back_populates='repository')
1775
1775
1776 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1776 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1777
1777
1778 pull_requests_source = relationship(
1778 pull_requests_source = relationship(
1779 'PullRequest',
1779 'PullRequest',
1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1781 cascade="all, delete-orphan",
1781 cascade="all, delete-orphan",
1782 #back_populates="pr_source"
1782 #back_populates="pr_source"
1783 )
1783 )
1784 pull_requests_target = relationship(
1784 pull_requests_target = relationship(
1785 'PullRequest',
1785 'PullRequest',
1786 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1786 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1787 cascade="all, delete-orphan",
1787 cascade="all, delete-orphan",
1788 #back_populates="pr_target"
1788 #back_populates="pr_target"
1789 )
1789 )
1790
1790
1791 ui = relationship('RepoRhodeCodeUi', cascade="all")
1791 ui = relationship('RepoRhodeCodeUi', cascade="all")
1792 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1792 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1793 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1793 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1794
1794
1795 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1795 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1796
1796
1797 # no cascade, set NULL
1797 # no cascade, set NULL
1798 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1798 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1799
1799
1800 review_rules = relationship('RepoReviewRule')
1800 review_rules = relationship('RepoReviewRule')
1801 user_branch_perms = relationship('UserToRepoBranchPermission')
1801 user_branch_perms = relationship('UserToRepoBranchPermission')
1802 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1802 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1803
1803
1804 def __repr__(self):
1804 def __repr__(self):
1805 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1805 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1806
1806
1807 @hybrid_property
1807 @hybrid_property
1808 def description_safe(self):
1808 def description_safe(self):
1809 from rhodecode.lib import helpers as h
1809 from rhodecode.lib import helpers as h
1810 return h.escape(self.description)
1810 return h.escape(self.description)
1811
1811
1812 @hybrid_property
1812 @hybrid_property
1813 def landing_rev(self):
1813 def landing_rev(self):
1814 # always should return [rev_type, rev], e.g ['branch', 'master']
1814 # always should return [rev_type, rev], e.g ['branch', 'master']
1815 if self._landing_revision:
1815 if self._landing_revision:
1816 _rev_info = self._landing_revision.split(':')
1816 _rev_info = self._landing_revision.split(':')
1817 if len(_rev_info) < 2:
1817 if len(_rev_info) < 2:
1818 _rev_info.insert(0, 'rev')
1818 _rev_info.insert(0, 'rev')
1819 return [_rev_info[0], _rev_info[1]]
1819 return [_rev_info[0], _rev_info[1]]
1820 return [None, None]
1820 return [None, None]
1821
1821
1822 @property
1822 @property
1823 def landing_ref_type(self):
1823 def landing_ref_type(self):
1824 return self.landing_rev[0]
1824 return self.landing_rev[0]
1825
1825
1826 @property
1826 @property
1827 def landing_ref_name(self):
1827 def landing_ref_name(self):
1828 return self.landing_rev[1]
1828 return self.landing_rev[1]
1829
1829
1830 @landing_rev.setter
1830 @landing_rev.setter
1831 def landing_rev(self, val):
1831 def landing_rev(self, val):
1832 if ':' not in val:
1832 if ':' not in val:
1833 raise ValueError('value must be delimited with `:` and consist '
1833 raise ValueError('value must be delimited with `:` and consist '
1834 'of <rev_type>:<rev>, got %s instead' % val)
1834 'of <rev_type>:<rev>, got %s instead' % val)
1835 self._landing_revision = val
1835 self._landing_revision = val
1836
1836
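# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Standalone illustration of the '<rev_type>:<rev>' storage format parsed by
# the landing_rev hybrid property above.
def parse_landing_rev(raw):
    parts = raw.split(':')
    if len(parts) < 2:
        parts.insert(0, 'rev')
    return [parts[0], parts[1]]

assert parse_landing_rev('branch:master') == ['branch', 'master']
assert parse_landing_rev('deadbeefcafe') == ['rev', 'deadbeefcafe']
# ------------------------------------------------------------------------------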
1837 @hybrid_property
1837 @hybrid_property
1838 def locked(self):
1838 def locked(self):
1839 if self._locked:
1839 if self._locked:
1840 user_id, timelocked, reason = self._locked.split(':')
1840 user_id, timelocked, reason = self._locked.split(':')
1841 lock_values = int(user_id), timelocked, reason
1841 lock_values = int(user_id), timelocked, reason
1842 else:
1842 else:
1843 lock_values = [None, None, None]
1843 lock_values = [None, None, None]
1844 return lock_values
1844 return lock_values
1845
1845
1846 @locked.setter
1846 @locked.setter
1847 def locked(self, val):
1847 def locked(self, val):
1848 if val and isinstance(val, (list, tuple)):
1848 if val and isinstance(val, (list, tuple)):
1849 self._locked = ':'.join(map(str, val))
1849 self._locked = ':'.join(map(str, val))
1850 else:
1850 else:
1851 self._locked = None
1851 self._locked = None
1852
1852
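# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Standalone illustration of the 'user_id:unix_timestamp:reason' string that
# the locked hybrid property above joins on write and splits back on read.
import time

raw_lock = ':'.join(map(str, [2, int(time.time()), 'lock_api']))
user_id, timelocked, reason = raw_lock.split(':')
assert int(user_id) == 2 and reason == 'lock_api'
# ------------------------------------------------------------------------------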
1853 @classmethod
1853 @classmethod
1854 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1854 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1855 from rhodecode.lib.vcs.backends.base import EmptyCommit
1855 from rhodecode.lib.vcs.backends.base import EmptyCommit
1856 dummy = EmptyCommit().__json__()
1856 dummy = EmptyCommit().__json__()
1857 if not changeset_cache_raw:
1857 if not changeset_cache_raw:
1858 dummy['source_repo_id'] = repo_id
1858 dummy['source_repo_id'] = repo_id
1859 return json.loads(json.dumps(dummy))
1859 return json.loads(json.dumps(dummy))
1860
1860
1861 try:
1861 try:
1862 return json.loads(changeset_cache_raw)
1862 return json.loads(changeset_cache_raw)
1863 except TypeError:
1863 except TypeError:
1864 return dummy
1864 return dummy
1865 except Exception:
1865 except Exception:
1866 log.error(traceback.format_exc())
1866 log.error(traceback.format_exc())
1867 return dummy
1867 return dummy
1868
1868
1869 @hybrid_property
1869 @hybrid_property
1870 def changeset_cache(self):
1870 def changeset_cache(self):
1871 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1871 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1872
1872
1873 @changeset_cache.setter
1873 @changeset_cache.setter
1874 def changeset_cache(self, val):
1874 def changeset_cache(self, val):
1875 try:
1875 try:
1876 self._changeset_cache = json.dumps(val)
1876 self._changeset_cache = json.dumps(val)
1877 except Exception:
1877 except Exception:
1878 log.error(traceback.format_exc())
1878 log.error(traceback.format_exc())
1879
1879
1880 @hybrid_property
1880 @hybrid_property
1881 def repo_name(self):
1881 def repo_name(self):
1882 return self._repo_name
1882 return self._repo_name
1883
1883
1884 @repo_name.setter
1884 @repo_name.setter
1885 def repo_name(self, value):
1885 def repo_name(self, value):
1886 self._repo_name = value
1886 self._repo_name = value
1887 self.repo_name_hash = sha1(safe_bytes(value))
1887 self.repo_name_hash = sha1(safe_bytes(value))
1888
1888
1889 @classmethod
1889 @classmethod
1890 def normalize_repo_name(cls, repo_name):
1890 def normalize_repo_name(cls, repo_name):
1891 """
1891 """
1892 Normalizes an os-specific repo_name to the format stored internally in
1892 Normalizes an os-specific repo_name to the format stored internally in
1893 the database, using URL_SEP
1893 the database, using URL_SEP
1894
1894
1895 :param cls:
1895 :param cls:
1896 :param repo_name:
1896 :param repo_name:
1897 """
1897 """
1898 return cls.NAME_SEP.join(repo_name.split(os.sep))
1898 return cls.NAME_SEP.join(repo_name.split(os.sep))
1899
1899
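# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Standalone illustration of normalize_repo_name() above: an os-specific path
# separator is replaced with the URL separator used in the database.
def normalize(repo_name, os_sep='\\', url_sep='/'):
    return url_sep.join(repo_name.split(os_sep))

assert normalize('group\\subgroup\\repo') == 'group/subgroup/repo'
# ------------------------------------------------------------------------------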
1900 @classmethod
1900 @classmethod
1901 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1901 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1902 session = Session()
1902 session = Session()
1903 q = session.query(cls).filter(cls.repo_name == repo_name)
1903 q = session.query(cls).filter(cls.repo_name == repo_name)
1904
1904
1905 if cache:
1905 if cache:
1906 if identity_cache:
1906 if identity_cache:
1907 val = cls.identity_cache(session, 'repo_name', repo_name)
1907 val = cls.identity_cache(session, 'repo_name', repo_name)
1908 if val:
1908 if val:
1909 return val
1909 return val
1910 else:
1910 else:
1911 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1911 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1912 q = q.options(
1912 q = q.options(
1913 FromCache("sql_cache_short", cache_key))
1913 FromCache("sql_cache_short", cache_key))
1914
1914
1915 return q.scalar()
1915 return q.scalar()
1916
1916
1917 @classmethod
1917 @classmethod
1918 def get_by_id_or_repo_name(cls, repoid):
1918 def get_by_id_or_repo_name(cls, repoid):
1919 if isinstance(repoid, int):
1919 if isinstance(repoid, int):
1920 try:
1920 try:
1921 repo = cls.get(repoid)
1921 repo = cls.get(repoid)
1922 except ValueError:
1922 except ValueError:
1923 repo = None
1923 repo = None
1924 else:
1924 else:
1925 repo = cls.get_by_repo_name(repoid)
1925 repo = cls.get_by_repo_name(repoid)
1926 return repo
1926 return repo
1927
1927
1928 @classmethod
1928 @classmethod
1929 def get_by_full_path(cls, repo_full_path):
1929 def get_by_full_path(cls, repo_full_path):
1930 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1930 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1931 repo_name = cls.normalize_repo_name(repo_name)
1931 repo_name = cls.normalize_repo_name(repo_name)
1932 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1932 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1933
1933
1934 @classmethod
1934 @classmethod
1935 def get_repo_forks(cls, repo_id):
1935 def get_repo_forks(cls, repo_id):
1936 return cls.query().filter(Repository.fork_id == repo_id)
1936 return cls.query().filter(Repository.fork_id == repo_id)
1937
1937
1938 @classmethod
1938 @classmethod
1939 def base_path(cls):
1939 def base_path(cls):
1940 """
1940 """
1941 Returns the base path where all repos are stored
1941 Returns the base path where all repos are stored
1942
1942
1943 :param cls:
1943 :param cls:
1944 """
1944 """
1945 from rhodecode.lib.utils import get_rhodecode_base_path
1945 from rhodecode.lib.utils import get_rhodecode_base_path
1946 return get_rhodecode_base_path()
1946 return get_rhodecode_base_path()
1947
1947
1948 @classmethod
1948 @classmethod
1949 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1949 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1950 case_insensitive=True, archived=False):
1950 case_insensitive=True, archived=False):
1951 q = Repository.query()
1951 q = Repository.query()
1952
1952
1953 if not archived:
1953 if not archived:
1954 q = q.filter(Repository.archived.isnot(true()))
1954 q = q.filter(Repository.archived.isnot(true()))
1955
1955
1956 if not isinstance(user_id, Optional):
1956 if not isinstance(user_id, Optional):
1957 q = q.filter(Repository.user_id == user_id)
1957 q = q.filter(Repository.user_id == user_id)
1958
1958
1959 if not isinstance(group_id, Optional):
1959 if not isinstance(group_id, Optional):
1960 q = q.filter(Repository.group_id == group_id)
1960 q = q.filter(Repository.group_id == group_id)
1961
1961
1962 if case_insensitive:
1962 if case_insensitive:
1963 q = q.order_by(func.lower(Repository.repo_name))
1963 q = q.order_by(func.lower(Repository.repo_name))
1964 else:
1964 else:
1965 q = q.order_by(Repository.repo_name)
1965 q = q.order_by(Repository.repo_name)
1966
1966
1967 return q.all()
1967 return q.all()
1968
1968
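# --- Editor's note: illustrative sketch, not part of this changeset. ---------
# Example use of get_all_repos() above; the Optional() defaults mean "do not
# filter on this column". Assumes a configured database session and an
# existing `some_user` object.
non_archived = Repository.get_all_repos(archived=False)
owned_by_user = Repository.get_all_repos(user_id=some_user.user_id)
# ------------------------------------------------------------------------------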
1969 @property
1969 @property
1970 def repo_uid(self):
1970 def repo_uid(self):
1971 return '_{}'.format(self.repo_id)
1971 return '_{}'.format(self.repo_id)
1972
1972
1973 @property
1973 @property
1974 def forks(self):
1974 def forks(self):
1975 """
1975 """
1976 Return forks of this repo
1976 Return forks of this repo
1977 """
1977 """
1978 return Repository.get_repo_forks(self.repo_id)
1978 return Repository.get_repo_forks(self.repo_id)
1979
1979
1980 @property
1980 @property
1981 def parent(self):
1981 def parent(self):
1982 """
1982 """
1983 Returns fork parent
1983 Returns fork parent
1984 """
1984 """
1985 return self.fork
1985 return self.fork
1986
1986
1987 @property
1987 @property
1988 def just_name(self):
1988 def just_name(self):
1989 return self.repo_name.split(self.NAME_SEP)[-1]
1989 return self.repo_name.split(self.NAME_SEP)[-1]
1990
1990
1991 @property
1991 @property
1992 def groups_with_parents(self):
1992 def groups_with_parents(self):
1993 groups = []
1993 groups = []
1994 if self.group is None:
1994 if self.group is None:
1995 return groups
1995 return groups
1996
1996
1997 cur_gr = self.group
1997 cur_gr = self.group
1998 groups.insert(0, cur_gr)
1998 groups.insert(0, cur_gr)
1999 while 1:
1999 while 1:
2000 gr = getattr(cur_gr, 'parent_group', None)
2000 gr = getattr(cur_gr, 'parent_group', None)
2001 cur_gr = cur_gr.parent_group
2001 cur_gr = cur_gr.parent_group
2002 if gr is None:
2002 if gr is None:
2003 break
2003 break
2004 groups.insert(0, gr)
2004 groups.insert(0, gr)
2005
2005
2006 return groups
2006 return groups
2007
2007
2008 @property
2008 @property
2009 def groups_and_repo(self):
2009 def groups_and_repo(self):
2010 return self.groups_with_parents, self
2010 return self.groups_with_parents, self
2011
2011
2012 @LazyProperty
2012 @LazyProperty
2013 def repo_path(self):
2013 def repo_path(self):
2014 """
2014 """
2015 Returns base full path for that repository, i.e. where it actually
2015 Returns base full path for that repository, i.e. where it actually
2016 exists on the filesystem
2016 exists on the filesystem
2017 """
2017 """
2018 q = Session().query(RhodeCodeUi).filter(
2018 q = Session().query(RhodeCodeUi).filter(
2019 RhodeCodeUi.ui_key == self.NAME_SEP)
2019 RhodeCodeUi.ui_key == self.NAME_SEP)
2020 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2020 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2021 return q.one().ui_value
2021 return q.one().ui_value
2022
2022
2023 @property
2023 @property
2024 def repo_full_path(self):
2024 def repo_full_path(self):
2025 p = [self.repo_path]
2025 p = [self.repo_path]
2026 # we need to split the name by / since this is how we store the
2026 # we need to split the name by / since this is how we store the
2027 # names in the database, but that eventually needs to be converted
2027 # names in the database, but that eventually needs to be converted
2028 # into a valid system path
2028 # into a valid system path
2029 p += self.repo_name.split(self.NAME_SEP)
2029 p += self.repo_name.split(self.NAME_SEP)
2030 return os.path.join(*map(safe_str, p))
2030 return os.path.join(*map(safe_str, p))
2031
2031
2032 @property
2032 @property
2033 def cache_keys(self):
2033 def cache_keys(self):
2034 """
2034 """
2035 Returns associated cache keys for that repo
2035 Returns associated cache keys for that repo
2036 """
2036 """
2037 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2037 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2038 repo_id=self.repo_id)
2039 return CacheKey.query()\
2038 return CacheKey.query()\
2040 .filter(CacheKey.cache_args == invalidation_namespace)\
2039 .filter(CacheKey.cache_key == repo_namespace_key)\
2041 .order_by(CacheKey.cache_key)\
2040 .order_by(CacheKey.cache_key)\
2042 .all()
2041 .all()
2043
2042
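The hunk above switches the CacheKey lookup from matching cache_args against an invalidation namespace to matching cache_key against a single per-repo namespace key. Below is a minimal standalone sketch of that lookup; the template string and record shape are assumptions for illustration, not the real CacheKey definition.

# Standalone sketch of the per-repo namespace lookup used above (the template
# string is an assumption; the real value lives on CacheKey.REPO_INVALIDATION_NAMESPACE).
REPO_INVALIDATION_NAMESPACE = 'repo_cache.{repo_id}'  # assumed format

def cache_keys_for_repo(records, repo_id):
    # select records whose cache_key equals the repo namespace key, ordered by key
    repo_namespace_key = REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    return sorted((r for r in records if r['cache_key'] == repo_namespace_key),
                  key=lambda r: r['cache_key'])

records = [{'cache_key': 'repo_cache.5'}, {'cache_key': 'repo_cache.7'}]
print(cache_keys_for_repo(records, repo_id=5))  # [{'cache_key': 'repo_cache.5'}]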
2044 @property
2043 @property
2045 def cached_diffs_relative_dir(self):
2044 def cached_diffs_relative_dir(self):
2046 """
2045 """
2047 Return the path of cached diffs relative to the repository store,
2046 Return the path of cached diffs relative to the repository store,
2048 used for safe display to users, who shouldn't know the absolute store
2047 used for safe display to users, who shouldn't know the absolute store
2049 path
2048 path
2050 """
2049 """
2051 return os.path.join(
2050 return os.path.join(
2052 os.path.dirname(self.repo_name),
2051 os.path.dirname(self.repo_name),
2053 self.cached_diffs_dir.split(os.path.sep)[-1])
2052 self.cached_diffs_dir.split(os.path.sep)[-1])
2054
2053
2055 @property
2054 @property
2056 def cached_diffs_dir(self):
2055 def cached_diffs_dir(self):
2057 path = self.repo_full_path
2056 path = self.repo_full_path
2058 return os.path.join(
2057 return os.path.join(
2059 os.path.dirname(path),
2058 os.path.dirname(path),
2060 f'.__shadow_diff_cache_repo_{self.repo_id}')
2059 f'.__shadow_diff_cache_repo_{self.repo_id}')
2061
2060
2062 def cached_diffs(self):
2061 def cached_diffs(self):
2063 diff_cache_dir = self.cached_diffs_dir
2062 diff_cache_dir = self.cached_diffs_dir
2064 if os.path.isdir(diff_cache_dir):
2063 if os.path.isdir(diff_cache_dir):
2065 return os.listdir(diff_cache_dir)
2064 return os.listdir(diff_cache_dir)
2066 return []
2065 return []
2067
2066
2068 def shadow_repos(self):
2067 def shadow_repos(self):
2069 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2068 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2070 return [
2069 return [
2071 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2070 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2072 if x.startswith(shadow_repos_pattern)
2071 if x.startswith(shadow_repos_pattern)
2073 ]
2072 ]
2074
2073
2075 def get_new_name(self, repo_name):
2074 def get_new_name(self, repo_name):
2076 """
2075 """
2077 returns new full repository name based on assigned group and new name
2076 returns new full repository name based on assigned group and new name
2078
2077
2079 :param repo_name:
2078 :param repo_name:
2080 """
2079 """
2081 path_prefix = self.group.full_path_splitted if self.group else []
2080 path_prefix = self.group.full_path_splitted if self.group else []
2082 return self.NAME_SEP.join(path_prefix + [repo_name])
2081 return self.NAME_SEP.join(path_prefix + [repo_name])
2083
2082
2084 @property
2083 @property
2085 def _config(self):
2084 def _config(self):
2086 """
2085 """
2087 Returns db based config object.
2086 Returns db based config object.
2088 """
2087 """
2089 from rhodecode.lib.utils import make_db_config
2088 from rhodecode.lib.utils import make_db_config
2090 return make_db_config(clear_session=False, repo=self)
2089 return make_db_config(clear_session=False, repo=self)
2091
2090
2092 def permissions(self, with_admins=True, with_owner=True,
2091 def permissions(self, with_admins=True, with_owner=True,
2093 expand_from_user_groups=False):
2092 expand_from_user_groups=False):
2094 """
2093 """
2095 Permissions for repositories
2094 Permissions for repositories
2096 """
2095 """
2097 _admin_perm = 'repository.admin'
2096 _admin_perm = 'repository.admin'
2098
2097
2099 owner_row = []
2098 owner_row = []
2100 if with_owner:
2099 if with_owner:
2101 usr = AttributeDict(self.user.get_dict())
2100 usr = AttributeDict(self.user.get_dict())
2102 usr.owner_row = True
2101 usr.owner_row = True
2103 usr.permission = _admin_perm
2102 usr.permission = _admin_perm
2104 usr.permission_id = None
2103 usr.permission_id = None
2105 owner_row.append(usr)
2104 owner_row.append(usr)
2106
2105
2107 super_admin_ids = []
2106 super_admin_ids = []
2108 super_admin_rows = []
2107 super_admin_rows = []
2109 if with_admins:
2108 if with_admins:
2110 for usr in User.get_all_super_admins():
2109 for usr in User.get_all_super_admins():
2111 super_admin_ids.append(usr.user_id)
2110 super_admin_ids.append(usr.user_id)
2112 # if this admin is also owner, don't double the record
2111 # if this admin is also owner, don't double the record
2113 if usr.user_id == owner_row[0].user_id:
2112 if usr.user_id == owner_row[0].user_id:
2114 owner_row[0].admin_row = True
2113 owner_row[0].admin_row = True
2115 else:
2114 else:
2116 usr = AttributeDict(usr.get_dict())
2115 usr = AttributeDict(usr.get_dict())
2117 usr.admin_row = True
2116 usr.admin_row = True
2118 usr.permission = _admin_perm
2117 usr.permission = _admin_perm
2119 usr.permission_id = None
2118 usr.permission_id = None
2120 super_admin_rows.append(usr)
2119 super_admin_rows.append(usr)
2121
2120
2122 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2121 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2123 q = q.options(joinedload(UserRepoToPerm.repository),
2122 q = q.options(joinedload(UserRepoToPerm.repository),
2124 joinedload(UserRepoToPerm.user),
2123 joinedload(UserRepoToPerm.user),
2125 joinedload(UserRepoToPerm.permission),)
2124 joinedload(UserRepoToPerm.permission),)
2126
2125
2127 # get owners and admins and permissions. We do a trick of re-writing
2126 # get owners and admins and permissions. We do a trick of re-writing
2128 # objects from sqlalchemy to named-tuples because the sqlalchemy session
2127 # objects from sqlalchemy to named-tuples because the sqlalchemy session
2129 # has a global reference and changing one object propagates to all
2128 # has a global reference and changing one object propagates to all
2130 # others. This means if an admin is also an owner, an admin_row change
2129 # others. This means if an admin is also an owner, an admin_row change
2131 # would propagate to both objects
2130 # would propagate to both objects
2132 perm_rows = []
2131 perm_rows = []
2133 for _usr in q.all():
2132 for _usr in q.all():
2134 usr = AttributeDict(_usr.user.get_dict())
2133 usr = AttributeDict(_usr.user.get_dict())
2135 # if this user is also owner/admin, mark as duplicate record
2134 # if this user is also owner/admin, mark as duplicate record
2136 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2135 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2137 usr.duplicate_perm = True
2136 usr.duplicate_perm = True
2138 # also check if this permission is maybe used by branch_permissions
2137 # also check if this permission is maybe used by branch_permissions
2139 if _usr.branch_perm_entry:
2138 if _usr.branch_perm_entry:
2140 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2139 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2141
2140
2142 usr.permission = _usr.permission.permission_name
2141 usr.permission = _usr.permission.permission_name
2143 usr.permission_id = _usr.repo_to_perm_id
2142 usr.permission_id = _usr.repo_to_perm_id
2144 perm_rows.append(usr)
2143 perm_rows.append(usr)
2145
2144
2146 # filter the perm rows by 'default' first and then sort them by
2145 # filter the perm rows by 'default' first and then sort them by
2147 # admin,write,read,none permissions sorted again alphabetically in
2146 # admin,write,read,none permissions sorted again alphabetically in
2148 # each group
2147 # each group
2149 perm_rows = sorted(perm_rows, key=display_user_sort)
2148 perm_rows = sorted(perm_rows, key=display_user_sort)
2150
2149
2151 user_groups_rows = []
2150 user_groups_rows = []
2152 if expand_from_user_groups:
2151 if expand_from_user_groups:
2153 for ug in self.permission_user_groups(with_members=True):
2152 for ug in self.permission_user_groups(with_members=True):
2154 for user_data in ug.members:
2153 for user_data in ug.members:
2155 user_groups_rows.append(user_data)
2154 user_groups_rows.append(user_data)
2156
2155
2157 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2156 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2158
2157
2159 def permission_user_groups(self, with_members=True):
2158 def permission_user_groups(self, with_members=True):
2160 q = UserGroupRepoToPerm.query()\
2159 q = UserGroupRepoToPerm.query()\
2161 .filter(UserGroupRepoToPerm.repository == self)
2160 .filter(UserGroupRepoToPerm.repository == self)
2162 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2161 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2163 joinedload(UserGroupRepoToPerm.users_group),
2162 joinedload(UserGroupRepoToPerm.users_group),
2164 joinedload(UserGroupRepoToPerm.permission),)
2163 joinedload(UserGroupRepoToPerm.permission),)
2165
2164
2166 perm_rows = []
2165 perm_rows = []
2167 for _user_group in q.all():
2166 for _user_group in q.all():
2168 entry = AttributeDict(_user_group.users_group.get_dict())
2167 entry = AttributeDict(_user_group.users_group.get_dict())
2169 entry.permission = _user_group.permission.permission_name
2168 entry.permission = _user_group.permission.permission_name
2170 if with_members:
2169 if with_members:
2171 entry.members = [x.user.get_dict()
2170 entry.members = [x.user.get_dict()
2172 for x in _user_group.users_group.members]
2171 for x in _user_group.users_group.members]
2173 perm_rows.append(entry)
2172 perm_rows.append(entry)
2174
2173
2175 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2174 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2176 return perm_rows
2175 return perm_rows
2177
2176
2178 def get_api_data(self, include_secrets=False):
2177 def get_api_data(self, include_secrets=False):
2179 """
2178 """
2180 Common function for generating repo api data
2179 Common function for generating repo api data
2181
2180
2182 :param include_secrets: See :meth:`User.get_api_data`.
2181 :param include_secrets: See :meth:`User.get_api_data`.
2183
2182
2184 """
2183 """
2185 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2184 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2186 # move these methods to the models level.
2185 # move these methods to the models level.
2187 from rhodecode.model.settings import SettingsModel
2186 from rhodecode.model.settings import SettingsModel
2188 from rhodecode.model.repo import RepoModel
2187 from rhodecode.model.repo import RepoModel
2189
2188
2190 repo = self
2189 repo = self
2191 _user_id, _time, _reason = self.locked
2190 _user_id, _time, _reason = self.locked
2192
2191
2193 data = {
2192 data = {
2194 'repo_id': repo.repo_id,
2193 'repo_id': repo.repo_id,
2195 'repo_name': repo.repo_name,
2194 'repo_name': repo.repo_name,
2196 'repo_type': repo.repo_type,
2195 'repo_type': repo.repo_type,
2197 'clone_uri': repo.clone_uri or '',
2196 'clone_uri': repo.clone_uri or '',
2198 'push_uri': repo.push_uri or '',
2197 'push_uri': repo.push_uri or '',
2199 'url': RepoModel().get_url(self),
2198 'url': RepoModel().get_url(self),
2200 'private': repo.private,
2199 'private': repo.private,
2201 'created_on': repo.created_on,
2200 'created_on': repo.created_on,
2202 'description': repo.description_safe,
2201 'description': repo.description_safe,
2203 'landing_rev': repo.landing_rev,
2202 'landing_rev': repo.landing_rev,
2204 'owner': repo.user.username,
2203 'owner': repo.user.username,
2205 'fork_of': repo.fork.repo_name if repo.fork else None,
2204 'fork_of': repo.fork.repo_name if repo.fork else None,
2206 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2205 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2207 'enable_statistics': repo.enable_statistics,
2206 'enable_statistics': repo.enable_statistics,
2208 'enable_locking': repo.enable_locking,
2207 'enable_locking': repo.enable_locking,
2209 'enable_downloads': repo.enable_downloads,
2208 'enable_downloads': repo.enable_downloads,
2210 'last_changeset': repo.changeset_cache,
2209 'last_changeset': repo.changeset_cache,
2211 'locked_by': User.get(_user_id).get_api_data(
2210 'locked_by': User.get(_user_id).get_api_data(
2212 include_secrets=include_secrets) if _user_id else None,
2211 include_secrets=include_secrets) if _user_id else None,
2213 'locked_date': time_to_datetime(_time) if _time else None,
2212 'locked_date': time_to_datetime(_time) if _time else None,
2214 'lock_reason': _reason if _reason else None,
2213 'lock_reason': _reason if _reason else None,
2215 }
2214 }
2216
2215
2217 # TODO: mikhail: should be per-repo settings here
2216 # TODO: mikhail: should be per-repo settings here
2218 rc_config = SettingsModel().get_all_settings()
2217 rc_config = SettingsModel().get_all_settings()
2219 repository_fields = str2bool(
2218 repository_fields = str2bool(
2220 rc_config.get('rhodecode_repository_fields'))
2219 rc_config.get('rhodecode_repository_fields'))
2221 if repository_fields:
2220 if repository_fields:
2222 for f in self.extra_fields:
2221 for f in self.extra_fields:
2223 data[f.field_key_prefixed] = f.field_value
2222 data[f.field_key_prefixed] = f.field_value
2224
2223
2225 return data
2224 return data
2226
2225
2227 @classmethod
2226 @classmethod
2228 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2227 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2229 if not lock_time:
2228 if not lock_time:
2230 lock_time = time.time()
2229 lock_time = time.time()
2231 if not lock_reason:
2230 if not lock_reason:
2232 lock_reason = cls.LOCK_AUTOMATIC
2231 lock_reason = cls.LOCK_AUTOMATIC
2233 repo.locked = [user_id, lock_time, lock_reason]
2232 repo.locked = [user_id, lock_time, lock_reason]
2234 Session().add(repo)
2233 Session().add(repo)
2235 Session().commit()
2234 Session().commit()
2236
2235
2237 @classmethod
2236 @classmethod
2238 def unlock(cls, repo):
2237 def unlock(cls, repo):
2239 repo.locked = None
2238 repo.locked = None
2240 Session().add(repo)
2239 Session().add(repo)
2241 Session().commit()
2240 Session().commit()
2242
2241
2243 @classmethod
2242 @classmethod
2244 def getlock(cls, repo):
2243 def getlock(cls, repo):
2245 return repo.locked
2244 return repo.locked
2246
2245
2247 def get_locking_state(self, action, user_id, only_when_enabled=True):
2246 def get_locking_state(self, action, user_id, only_when_enabled=True):
2248 """
2247 """
2249 Checks locking on this repository. If locking is enabled and a lock is
2248 Checks locking on this repository. If locking is enabled and a lock is
2250 present, returns a tuple of make_lock, locked, locked_by.
2249 present, returns a tuple of make_lock, locked, locked_by.
2251 make_lock can have 3 states: None (do nothing), True (make lock),
2250 make_lock can have 3 states: None (do nothing), True (make lock),
2252 False (release lock). This value is later propagated to hooks, which
2251 False (release lock). This value is later propagated to hooks, which
2253 do the locking. Think of this as signals passed to hooks on what to do.
2252 do the locking. Think of this as signals passed to hooks on what to do.
2254
2253
2255 """
2254 """
2256 # TODO: johbo: This is part of the business logic and should be moved
2255 # TODO: johbo: This is part of the business logic and should be moved
2257 # into the RepositoryModel.
2256 # into the RepositoryModel.
2258
2257
2259 if action not in ('push', 'pull'):
2258 if action not in ('push', 'pull'):
2260 raise ValueError("Invalid action value: %s" % repr(action))
2259 raise ValueError("Invalid action value: %s" % repr(action))
2261
2260
2262 # defines if locked error should be thrown to user
2261 # defines if locked error should be thrown to user
2263 currently_locked = False
2262 currently_locked = False
2264 # defines if new lock should be made, tri-state
2263 # defines if new lock should be made, tri-state
2265 make_lock = None
2264 make_lock = None
2266 repo = self
2265 repo = self
2267 user = User.get(user_id)
2266 user = User.get(user_id)
2268
2267
2269 lock_info = repo.locked
2268 lock_info = repo.locked
2270
2269
2271 if repo and (repo.enable_locking or not only_when_enabled):
2270 if repo and (repo.enable_locking or not only_when_enabled):
2272 if action == 'push':
2271 if action == 'push':
2273 # check if it's already locked! If it is, compare users
2272 # check if it's already locked! If it is, compare users
2274 locked_by_user_id = lock_info[0]
2273 locked_by_user_id = lock_info[0]
2275 if user.user_id == locked_by_user_id:
2274 if user.user_id == locked_by_user_id:
2276 log.debug(
2275 log.debug(
2277 'Got `push` action from user %s, now unlocking', user)
2276 'Got `push` action from user %s, now unlocking', user)
2278 # unlock if we have push from user who locked
2277 # unlock if we have push from user who locked
2279 make_lock = False
2278 make_lock = False
2280 else:
2279 else:
2281 # we're not the same user who locked, ban with
2280 # we're not the same user who locked, ban with
2282 # code defined in settings (default is 423 HTTP Locked) !
2281 # code defined in settings (default is 423 HTTP Locked) !
2283 log.debug('Repo %s is currently locked by %s', repo, user)
2282 log.debug('Repo %s is currently locked by %s', repo, user)
2284 currently_locked = True
2283 currently_locked = True
2285 elif action == 'pull':
2284 elif action == 'pull':
2286 # [0] user [1] date
2285 # [0] user [1] date
2287 if lock_info[0] and lock_info[1]:
2286 if lock_info[0] and lock_info[1]:
2288 log.debug('Repo %s is currently locked by %s', repo, user)
2287 log.debug('Repo %s is currently locked by %s', repo, user)
2289 currently_locked = True
2288 currently_locked = True
2290 else:
2289 else:
2291 log.debug('Setting lock on repo %s by %s', repo, user)
2290 log.debug('Setting lock on repo %s by %s', repo, user)
2292 make_lock = True
2291 make_lock = True
2293
2292
2294 else:
2293 else:
2295 log.debug('Repository %s does not have locking enabled', repo)
2294 log.debug('Repository %s does not have locking enabled', repo)
2296
2295
2297 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2296 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2298 make_lock, currently_locked, lock_info)
2297 make_lock, currently_locked, lock_info)
2299
2298
2300 from rhodecode.lib.auth import HasRepoPermissionAny
2299 from rhodecode.lib.auth import HasRepoPermissionAny
2301 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2300 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2302 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2301 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2303 # if we don't have at least write permission we cannot make a lock
2302 # if we don't have at least write permission we cannot make a lock
2304 log.debug('lock state reset back to FALSE due to lack '
2303 log.debug('lock state reset back to FALSE due to lack '
2305 'of at least write permission')
2304 'of at least write permission')
2306 make_lock = False
2305 make_lock = False
2307
2306
2308 return make_lock, currently_locked, lock_info
2307 return make_lock, currently_locked, lock_info
2309
2308
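The tuple returned above is later consumed by hooks. A small illustrative sketch of how a caller might interpret the tri-state make_lock value follows; the function name and error handling here are hypothetical, not RhodeCode's actual hook code.

# Hypothetical consumer sketch: interpreting (make_lock, currently_locked, lock_info).
# None leaves the lock untouched, True sets it, False releases it.
def apply_lock_signal(make_lock, currently_locked, lock_info):
    if currently_locked:
        user_id, lock_time, reason = lock_info
        raise PermissionError(f'repository locked by user {user_id}: {reason}')
    if make_lock is True:
        return 'set-lock'
    if make_lock is False:
        return 'release-lock'
    return 'no-op'  # make_lock is None

print(apply_lock_signal(True, False, [None, None, None]))   # 'set-lock'
print(apply_lock_signal(None, False, [None, None, None]))   # 'no-op'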
2310 @property
2309 @property
2311 def last_commit_cache_update_diff(self):
2310 def last_commit_cache_update_diff(self):
2312 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2311 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2313
2312
2314 @classmethod
2313 @classmethod
2315 def _load_commit_change(cls, last_commit_cache):
2314 def _load_commit_change(cls, last_commit_cache):
2316 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2315 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2317 empty_date = datetime.datetime.fromtimestamp(0)
2316 empty_date = datetime.datetime.fromtimestamp(0)
2318 date_latest = last_commit_cache.get('date', empty_date)
2317 date_latest = last_commit_cache.get('date', empty_date)
2319 try:
2318 try:
2320 return parse_datetime(date_latest)
2319 return parse_datetime(date_latest)
2321 except Exception:
2320 except Exception:
2322 return empty_date
2321 return empty_date
2323
2322
2324 @property
2323 @property
2325 def last_commit_change(self):
2324 def last_commit_change(self):
2326 return self._load_commit_change(self.changeset_cache)
2325 return self._load_commit_change(self.changeset_cache)
2327
2326
2328 @property
2327 @property
2329 def last_db_change(self):
2328 def last_db_change(self):
2330 return self.updated_on
2329 return self.updated_on
2331
2330
2332 @property
2331 @property
2333 def clone_uri_hidden(self):
2332 def clone_uri_hidden(self):
2334 clone_uri = self.clone_uri
2333 clone_uri = self.clone_uri
2335 if clone_uri:
2334 if clone_uri:
2336 import urlobject
2335 import urlobject
2337 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2336 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2338 if url_obj.password:
2337 if url_obj.password:
2339 clone_uri = url_obj.with_password('*****')
2338 clone_uri = url_obj.with_password('*****')
2340 return clone_uri
2339 return clone_uri
2341
2340
2342 @property
2341 @property
2343 def push_uri_hidden(self):
2342 def push_uri_hidden(self):
2344 push_uri = self.push_uri
2343 push_uri = self.push_uri
2345 if push_uri:
2344 if push_uri:
2346 import urlobject
2345 import urlobject
2347 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2346 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2348 if url_obj.password:
2347 if url_obj.password:
2349 push_uri = url_obj.with_password('*****')
2348 push_uri = url_obj.with_password('*****')
2350 return push_uri
2349 return push_uri
2351
2350
2352 def clone_url(self, **override):
2351 def clone_url(self, **override):
2353 from rhodecode.model.settings import SettingsModel
2352 from rhodecode.model.settings import SettingsModel
2354
2353
2355 uri_tmpl = None
2354 uri_tmpl = None
2356 if 'with_id' in override:
2355 if 'with_id' in override:
2357 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2356 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2358 del override['with_id']
2357 del override['with_id']
2359
2358
2360 if 'uri_tmpl' in override:
2359 if 'uri_tmpl' in override:
2361 uri_tmpl = override['uri_tmpl']
2360 uri_tmpl = override['uri_tmpl']
2362 del override['uri_tmpl']
2361 del override['uri_tmpl']
2363
2362
2364 ssh = False
2363 ssh = False
2365 if 'ssh' in override:
2364 if 'ssh' in override:
2366 ssh = True
2365 ssh = True
2367 del override['ssh']
2366 del override['ssh']
2368
2367
2369 # we didn't override our tmpl from **overrides
2368 # we didn't override our tmpl from **overrides
2370 request = get_current_request()
2369 request = get_current_request()
2371 if not uri_tmpl:
2370 if not uri_tmpl:
2372 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2371 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2373 rc_config = request.call_context.rc_config
2372 rc_config = request.call_context.rc_config
2374 else:
2373 else:
2375 rc_config = SettingsModel().get_all_settings(cache=True)
2374 rc_config = SettingsModel().get_all_settings(cache=True)
2376
2375
2377 if ssh:
2376 if ssh:
2378 uri_tmpl = rc_config.get(
2377 uri_tmpl = rc_config.get(
2379 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2378 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2380
2379
2381 else:
2380 else:
2382 uri_tmpl = rc_config.get(
2381 uri_tmpl = rc_config.get(
2383 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2382 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2384
2383
2385 return get_clone_url(request=request,
2384 return get_clone_url(request=request,
2386 uri_tmpl=uri_tmpl,
2385 uri_tmpl=uri_tmpl,
2387 repo_name=self.repo_name,
2386 repo_name=self.repo_name,
2388 repo_id=self.repo_id,
2387 repo_id=self.repo_id,
2389 repo_type=self.repo_type,
2388 repo_type=self.repo_type,
2390 **override)
2389 **override)
2391
2390
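clone_url() pops the special keys with_id, uri_tmpl and ssh out of **override before deciding which URI template to render; all remaining keys pass through to get_clone_url. The sketch below mirrors only that override handling, with assumed template strings, and is not the real template resolution.

# Standalone sketch of the override handling in clone_url(); template values are
# assumptions, the real defaults live on the Repository class.
DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'            # assumed
DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'      # assumed
DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'       # assumed

def resolve_clone_template(**override):
    uri_tmpl = None
    if 'with_id' in override:         # presence alone selects the id-based template
        uri_tmpl = DEFAULT_CLONE_URI_ID
        del override['with_id']
    if 'uri_tmpl' in override:        # an explicit template wins
        uri_tmpl = override.pop('uri_tmpl')
    ssh = 'ssh' in override           # presence of the key enables the ssh default
    override.pop('ssh', None)
    if not uri_tmpl:
        uri_tmpl = DEFAULT_CLONE_URI_SSH if ssh else DEFAULT_CLONE_URI
    return uri_tmpl, ssh, override

print(resolve_clone_template(with_id=True))
print(resolve_clone_template(ssh=True, repo_name='demo'))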
2392 def set_state(self, state):
2391 def set_state(self, state):
2393 self.repo_state = state
2392 self.repo_state = state
2394 Session().add(self)
2393 Session().add(self)
2395 #==========================================================================
2394 #==========================================================================
2396 # SCM PROPERTIES
2395 # SCM PROPERTIES
2397 #==========================================================================
2396 #==========================================================================
2398
2397
2399 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2398 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2400 return get_commit_safe(
2399 return get_commit_safe(
2401 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2400 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2402 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2401 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2403
2402
2404 def get_changeset(self, rev=None, pre_load=None):
2403 def get_changeset(self, rev=None, pre_load=None):
2405 warnings.warn("Use get_commit", DeprecationWarning)
2404 warnings.warn("Use get_commit", DeprecationWarning)
2406 commit_id = None
2405 commit_id = None
2407 commit_idx = None
2406 commit_idx = None
2408 if isinstance(rev, str):
2407 if isinstance(rev, str):
2409 commit_id = rev
2408 commit_id = rev
2410 else:
2409 else:
2411 commit_idx = rev
2410 commit_idx = rev
2412 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2411 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2413 pre_load=pre_load)
2412 pre_load=pre_load)
2414
2413
2415 def get_landing_commit(self):
2414 def get_landing_commit(self):
2416 """
2415 """
2417 Returns landing commit, or if that doesn't exist returns the tip
2416 Returns landing commit, or if that doesn't exist returns the tip
2418 """
2417 """
2419 _rev_type, _rev = self.landing_rev
2418 _rev_type, _rev = self.landing_rev
2420 commit = self.get_commit(_rev)
2419 commit = self.get_commit(_rev)
2421 if isinstance(commit, EmptyCommit):
2420 if isinstance(commit, EmptyCommit):
2422 return self.get_commit()
2421 return self.get_commit()
2423 return commit
2422 return commit
2424
2423
2425 def flush_commit_cache(self):
2424 def flush_commit_cache(self):
2426 self.update_commit_cache(cs_cache={'raw_id':'0'})
2425 self.update_commit_cache(cs_cache={'raw_id':'0'})
2427 self.update_commit_cache()
2426 self.update_commit_cache()
2428
2427
2429 def update_commit_cache(self, cs_cache=None, config=None):
2428 def update_commit_cache(self, cs_cache=None, config=None):
2430 """
2429 """
2431 Update cache of last commit for repository
2430 Update cache of last commit for repository
2432 cache_keys should be::
2431 cache_keys should be::
2433
2432
2434 source_repo_id
2433 source_repo_id
2435 short_id
2434 short_id
2436 raw_id
2435 raw_id
2437 revision
2436 revision
2438 parents
2437 parents
2439 message
2438 message
2440 date
2439 date
2441 author
2440 author
2442 updated_on
2441 updated_on
2443
2442
2444 """
2443 """
2445 from rhodecode.lib.vcs.backends.base import BaseCommit
2444 from rhodecode.lib.vcs.backends.base import BaseCommit
2446 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2445 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2447 empty_date = datetime.datetime.fromtimestamp(0)
2446 empty_date = datetime.datetime.fromtimestamp(0)
2448 repo_commit_count = 0
2447 repo_commit_count = 0
2449
2448
2450 if cs_cache is None:
2449 if cs_cache is None:
2451 # use no-cache version here
2450 # use no-cache version here
2452 try:
2451 try:
2453 scm_repo = self.scm_instance(cache=False, config=config)
2452 scm_repo = self.scm_instance(cache=False, config=config)
2454 except VCSError:
2453 except VCSError:
2455 scm_repo = None
2454 scm_repo = None
2456 empty = scm_repo is None or scm_repo.is_empty()
2455 empty = scm_repo is None or scm_repo.is_empty()
2457
2456
2458 if not empty:
2457 if not empty:
2459 cs_cache = scm_repo.get_commit(
2458 cs_cache = scm_repo.get_commit(
2460 pre_load=["author", "date", "message", "parents", "branch"])
2459 pre_load=["author", "date", "message", "parents", "branch"])
2461 repo_commit_count = scm_repo.count()
2460 repo_commit_count = scm_repo.count()
2462 else:
2461 else:
2463 cs_cache = EmptyCommit()
2462 cs_cache = EmptyCommit()
2464
2463
2465 if isinstance(cs_cache, BaseCommit):
2464 if isinstance(cs_cache, BaseCommit):
2466 cs_cache = cs_cache.__json__()
2465 cs_cache = cs_cache.__json__()
2467
2466
2468 def is_outdated(new_cs_cache):
2467 def is_outdated(new_cs_cache):
2469 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2468 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2470 new_cs_cache['revision'] != self.changeset_cache['revision']):
2469 new_cs_cache['revision'] != self.changeset_cache['revision']):
2471 return True
2470 return True
2472 return False
2471 return False
2473
2472
2474 # check if we have maybe already latest cached revision
2473 # check if we have maybe already latest cached revision
2475 if is_outdated(cs_cache) or not self.changeset_cache:
2474 if is_outdated(cs_cache) or not self.changeset_cache:
2476 _current_datetime = datetime.datetime.utcnow()
2475 _current_datetime = datetime.datetime.utcnow()
2477 last_change = cs_cache.get('date') or _current_datetime
2476 last_change = cs_cache.get('date') or _current_datetime
2478 # we check if last update is newer than the new value
2477 # we check if last update is newer than the new value
2479 # if yes, we use the current timestamp instead. Imagine you get
2478 # if yes, we use the current timestamp instead. Imagine you get
2480 # an old commit pushed 1y ago, we'd set the last update to 1y ago.
2479 # an old commit pushed 1y ago, we'd set the last update to 1y ago.
2481 last_change_timestamp = datetime_to_time(last_change)
2480 last_change_timestamp = datetime_to_time(last_change)
2482 current_timestamp = datetime_to_time(last_change)
2481 current_timestamp = datetime_to_time(last_change)
2483 if last_change_timestamp > current_timestamp and not empty:
2482 if last_change_timestamp > current_timestamp and not empty:
2484 cs_cache['date'] = _current_datetime
2483 cs_cache['date'] = _current_datetime
2485
2484
2486 # also store size of repo
2485 # also store size of repo
2487 cs_cache['repo_commit_count'] = repo_commit_count
2486 cs_cache['repo_commit_count'] = repo_commit_count
2488
2487
2489 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2488 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2490 cs_cache['updated_on'] = time.time()
2489 cs_cache['updated_on'] = time.time()
2491 self.changeset_cache = cs_cache
2490 self.changeset_cache = cs_cache
2492 self.updated_on = last_change
2491 self.updated_on = last_change
2493 Session().add(self)
2492 Session().add(self)
2494 Session().commit()
2493 Session().commit()
2495
2494
2496 else:
2495 else:
2497 if empty:
2496 if empty:
2498 cs_cache = EmptyCommit().__json__()
2497 cs_cache = EmptyCommit().__json__()
2499 else:
2498 else:
2500 cs_cache = self.changeset_cache
2499 cs_cache = self.changeset_cache
2501
2500
2502 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2501 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2503
2502
2504 cs_cache['updated_on'] = time.time()
2503 cs_cache['updated_on'] = time.time()
2505 self.changeset_cache = cs_cache
2504 self.changeset_cache = cs_cache
2506 self.updated_on = _date_latest
2505 self.updated_on = _date_latest
2507 Session().add(self)
2506 Session().add(self)
2508 Session().commit()
2507 Session().commit()
2509
2508
2510 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2509 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2511 self.repo_name, cs_cache, _date_latest)
2510 self.repo_name, cs_cache, _date_latest)
2512
2511
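The core decision above is the is_outdated() check: the stored changeset cache is refreshed only when the new raw_id or revision differs. A standalone sketch of that comparison, with plain dicts standing in for the model's changeset_cache, is below.

# Standalone sketch of the is_outdated() comparison above (plain dicts stand in
# for the model's changeset_cache).
def is_outdated(new_cs_cache, stored_cs_cache):
    return (new_cs_cache['raw_id'] != stored_cs_cache['raw_id']
            or new_cs_cache['revision'] != stored_cs_cache['revision'])

stored = {'raw_id': 'abc123', 'revision': 10}
print(is_outdated({'raw_id': 'abc123', 'revision': 10}, stored))  # False: cache is current
print(is_outdated({'raw_id': 'def456', 'revision': 11}, stored))  # True: needs refresh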
2513 @property
2512 @property
2514 def tip(self):
2513 def tip(self):
2515 return self.get_commit('tip')
2514 return self.get_commit('tip')
2516
2515
2517 @property
2516 @property
2518 def author(self):
2517 def author(self):
2519 return self.tip.author
2518 return self.tip.author
2520
2519
2521 @property
2520 @property
2522 def last_change(self):
2521 def last_change(self):
2523 return self.scm_instance().last_change
2522 return self.scm_instance().last_change
2524
2523
2525 def get_comments(self, revisions=None):
2524 def get_comments(self, revisions=None):
2526 """
2525 """
2527 Returns comments for this repository grouped by revisions
2526 Returns comments for this repository grouped by revisions
2528
2527
2529 :param revisions: filter query by revisions only
2528 :param revisions: filter query by revisions only
2530 """
2529 """
2531 cmts = ChangesetComment.query()\
2530 cmts = ChangesetComment.query()\
2532 .filter(ChangesetComment.repo == self)
2531 .filter(ChangesetComment.repo == self)
2533 if revisions:
2532 if revisions:
2534 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2533 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2535 grouped = collections.defaultdict(list)
2534 grouped = collections.defaultdict(list)
2536 for cmt in cmts.all():
2535 for cmt in cmts.all():
2537 grouped[cmt.revision].append(cmt)
2536 grouped[cmt.revision].append(cmt)
2538 return grouped
2537 return grouped
2539
2538
2540 def statuses(self, revisions=None):
2539 def statuses(self, revisions=None):
2541 """
2540 """
2542 Returns statuses for this repository
2541 Returns statuses for this repository
2543
2542
2544 :param revisions: list of revisions to get statuses for
2543 :param revisions: list of revisions to get statuses for
2545 """
2544 """
2546 statuses = ChangesetStatus.query()\
2545 statuses = ChangesetStatus.query()\
2547 .filter(ChangesetStatus.repo == self)\
2546 .filter(ChangesetStatus.repo == self)\
2548 .filter(ChangesetStatus.version == 0)
2547 .filter(ChangesetStatus.version == 0)
2549
2548
2550 if revisions:
2549 if revisions:
2551 # Try doing the filtering in chunks to avoid hitting limits
2550 # Try doing the filtering in chunks to avoid hitting limits
2552 size = 500
2551 size = 500
2553 status_results = []
2552 status_results = []
2554 for chunk in range(0, len(revisions), size):
2553 for chunk in range(0, len(revisions), size):
2555 status_results += statuses.filter(
2554 status_results += statuses.filter(
2556 ChangesetStatus.revision.in_(
2555 ChangesetStatus.revision.in_(
2557 revisions[chunk: chunk+size])
2556 revisions[chunk: chunk+size])
2558 ).all()
2557 ).all()
2559 else:
2558 else:
2560 status_results = statuses.all()
2559 status_results = statuses.all()
2561
2560
2562 grouped = {}
2561 grouped = {}
2563
2562
2564 # maybe we have an open new pull request without a status?
2563 # maybe we have an open new pull request without a status?
2565 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2564 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2566 status_lbl = ChangesetStatus.get_status_lbl(stat)
2565 status_lbl = ChangesetStatus.get_status_lbl(stat)
2567 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2566 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2568 for rev in pr.revisions:
2567 for rev in pr.revisions:
2569 pr_id = pr.pull_request_id
2568 pr_id = pr.pull_request_id
2570 pr_repo = pr.target_repo.repo_name
2569 pr_repo = pr.target_repo.repo_name
2571 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2570 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2572
2571
2573 for stat in status_results:
2572 for stat in status_results:
2574 pr_id = pr_repo = None
2573 pr_id = pr_repo = None
2575 if stat.pull_request:
2574 if stat.pull_request:
2576 pr_id = stat.pull_request.pull_request_id
2575 pr_id = stat.pull_request.pull_request_id
2577 pr_repo = stat.pull_request.target_repo.repo_name
2576 pr_repo = stat.pull_request.target_repo.repo_name
2578 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2577 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2579 pr_id, pr_repo]
2578 pr_id, pr_repo]
2580 return grouped
2579 return grouped
2581
2580
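The revisions filter above runs in slices of 500 so that a single IN (...) clause never carries an unbounded number of bound parameters. A minimal sketch of that chunking follows.

# Standalone sketch of the chunked filtering above: iterate the revision list in
# slices of `size` so each IN (...) clause stays within database parameter limits.
def chunked(revisions, size=500):
    for start in range(0, len(revisions), size):
        yield revisions[start:start + size]

revisions = [f'rev-{i}' for i in range(1200)]
print([len(chunk) for chunk in chunked(revisions)])  # [500, 500, 200]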
2582 # ==========================================================================
2581 # ==========================================================================
2583 # SCM CACHE INSTANCE
2582 # SCM CACHE INSTANCE
2584 # ==========================================================================
2583 # ==========================================================================
2585
2584
2586 def scm_instance(self, **kwargs):
2585 def scm_instance(self, **kwargs):
2587 import rhodecode
2586 import rhodecode
2588
2587
2589 # Passing a config will not hit the cache; currently this is only used
2588 # Passing a config will not hit the cache; currently this is only used
2590 # for repo2dbmapper
2589 # for repo2dbmapper
2591 config = kwargs.pop('config', None)
2590 config = kwargs.pop('config', None)
2592 cache = kwargs.pop('cache', None)
2591 cache = kwargs.pop('cache', None)
2593 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2592 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2594 if vcs_full_cache is not None:
2593 if vcs_full_cache is not None:
2595 # allows override global config
2594 # allows override global config
2596 full_cache = vcs_full_cache
2595 full_cache = vcs_full_cache
2597 else:
2596 else:
2598 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2597 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2599 # if cache is NOT defined use default global, else we have full
2598 # if cache is NOT defined use default global, else we have full
2600 # control over cache behaviour
2599 # control over cache behaviour
2601 if cache is None and full_cache and not config:
2600 if cache is None and full_cache and not config:
2602 log.debug('Initializing pure cached instance for %s', self.repo_path)
2601 log.debug('Initializing pure cached instance for %s', self.repo_path)
2603 return self._get_instance_cached()
2602 return self._get_instance_cached()
2604
2603
2605 # cache here is sent to the "vcs server"
2604 # cache here is sent to the "vcs server"
2606 return self._get_instance(cache=bool(cache), config=config)
2605 return self._get_instance(cache=bool(cache), config=config)
2607
2606
2608 def _get_instance_cached(self):
2607 def _get_instance_cached(self):
2609 from rhodecode.lib import rc_cache
2608 from rhodecode.lib import rc_cache
2610
2609
2611 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2610 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2612 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2613 repo_id=self.repo_id)
2614 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2611 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2615
2612
2616 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2617 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2618 return self._get_instance(repo_state_uid=_cache_state_uid)
2619
2620 # we must use thread scoped cache here,
2613 # we must use thread scoped cache here,
2621 # because each gevent thread needs its own non-shared connection and cache
2614 # because each gevent thread needs its own non-shared connection and cache
2622 # we also alter `args` so the cache key is individual for every green thread.
2615 # we also alter `args` so the cache key is individual for every green thread.
2623 inv_context_manager = rc_cache.InvalidationContext(
2616 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2624 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2617 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)
2625 thread_scoped=True)
2618
2619 # our wrapped caching function that takes state_uid to save the previous state in the cache
2620 def cache_generator(_state_uid):
2621
2622 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2623 def get_instance_cached(_repo_id, _process_context_id):
2624 # we save the generation state in the cached func so we can detect a change and invalidate caches
2625 return _state_uid, self._get_instance(repo_state_uid=_state_uid)
2626
2627 return get_instance_cached
2628
2626 with inv_context_manager as invalidation_context:
2629 with inv_context_manager as invalidation_context:
2627 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2630 cache_state_uid = invalidation_context.state_uid
2628 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2631 cache_func = cache_generator(cache_state_uid)
2629
2632
2630 # re-compute and store cache if we get invalidate signal
2633 args = self.repo_id, inv_context_manager.proc_key
2631 if invalidation_context.should_invalidate():
2634
2632 instance = get_instance_cached.refresh(*args)
2635 previous_state_uid, instance = cache_func(*args)
2633 else:
2636
2634 instance = get_instance_cached(*args)
2637 if instance:
2638 # now compare keys, the "cache" state vs expected state.
2639 if previous_state_uid != cache_state_uid:
2640 log.warning('Cached state uid %s is different than current state uid %s',
2641 previous_state_uid, cache_state_uid)
2642 _, instance = cache_func.refresh(*args)
2635
2643
2636 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2644 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2637 return instance
2645 return instance
2638
2646
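The hunk above is the heart of this changeset: instead of asking the old InvalidationContext whether to refresh, the cached function now stores the state uid it was computed under, and a mismatch with the current state_uid triggers cache_func.refresh(). Below is a standalone sketch of that compare-and-refresh pattern, with a plain dict standing in for the dogpile region; names other than those visible in the hunk are illustrative.

# Illustrative sketch (plain dict instead of a dogpile region): the cached value
# carries the state uid it was built under; if it differs from the current uid,
# the entry is recomputed, mirroring the refresh() branch above.
_region = {}

def get_instance_cached(repo_id, state_uid, compute):
    key = ('repo_instance', repo_id)
    if key not in _region:
        _region[key] = (state_uid, compute(state_uid))
    previous_state_uid, instance = _region[key]
    if previous_state_uid != state_uid:       # cache was built for an older state
        _region[key] = (state_uid, compute(state_uid))
        previous_state_uid, instance = _region[key]
    return instance

print(get_instance_cached(1, 'uid-A', lambda uid: f'repo@{uid}'))  # computed
print(get_instance_cached(1, 'uid-A', lambda uid: f'repo@{uid}'))  # served from cache
print(get_instance_cached(1, 'uid-B', lambda uid: f'repo@{uid}'))  # refreshed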
2639 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2647 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2640 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2648 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2641 self.repo_type, self.repo_path, cache)
2649 self.repo_type, self.repo_path, cache)
2642 config = config or self._config
2650 config = config or self._config
2643 custom_wire = {
2651 custom_wire = {
2644 'cache': cache, # controls the vcs.remote cache
2652 'cache': cache, # controls the vcs.remote cache
2645 'repo_state_uid': repo_state_uid
2653 'repo_state_uid': repo_state_uid
2646 }
2654 }
2647 repo = get_vcs_instance(
2655 repo = get_vcs_instance(
2648 repo_path=safe_str(self.repo_full_path),
2656 repo_path=safe_str(self.repo_full_path),
2649 config=config,
2657 config=config,
2650 with_wire=custom_wire,
2658 with_wire=custom_wire,
2651 create=False,
2659 create=False,
2652 _vcs_alias=self.repo_type)
2660 _vcs_alias=self.repo_type)
2653 if repo is not None:
2661 if repo is not None:
2654 repo.count() # cache rebuild
2662 repo.count() # cache rebuild
2663
2655 return repo
2664 return repo
2656
2665
2657 def get_shadow_repository_path(self, workspace_id):
2666 def get_shadow_repository_path(self, workspace_id):
2658 from rhodecode.lib.vcs.backends.base import BaseRepository
2667 from rhodecode.lib.vcs.backends.base import BaseRepository
2659 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2668 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2660 self.repo_full_path, self.repo_id, workspace_id)
2669 self.repo_full_path, self.repo_id, workspace_id)
2661 return shadow_repo_path
2670 return shadow_repo_path
2662
2671
2663 def __json__(self):
2672 def __json__(self):
2664 return {'landing_rev': self.landing_rev}
2673 return {'landing_rev': self.landing_rev}
2665
2674
2666 def get_dict(self):
2675 def get_dict(self):
2667
2676
2668 # Since we transformed `repo_name` to a hybrid property, we need to
2677 # Since we transformed `repo_name` to a hybrid property, we need to
2669 # keep compatibility with the code which uses `repo_name` field.
2678 # keep compatibility with the code which uses `repo_name` field.
2670
2679
2671 result = super(Repository, self).get_dict()
2680 result = super(Repository, self).get_dict()
2672 result['repo_name'] = result.pop('_repo_name', None)
2681 result['repo_name'] = result.pop('_repo_name', None)
2673 result.pop('_changeset_cache', '')
2682 result.pop('_changeset_cache', '')
2674 return result
2683 return result
2675
2684
2676
2685
2677 class RepoGroup(Base, BaseModel):
2686 class RepoGroup(Base, BaseModel):
2678 __tablename__ = 'groups'
2687 __tablename__ = 'groups'
2679 __table_args__ = (
2688 __table_args__ = (
2680 UniqueConstraint('group_name', 'group_parent_id'),
2689 UniqueConstraint('group_name', 'group_parent_id'),
2681 base_table_args,
2690 base_table_args,
2682 )
2691 )
2683
2692
2684 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2693 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2685
2694
2686 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2695 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2687 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2696 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2688 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2697 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2689 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2698 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2690 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2699 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2691 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2700 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2692 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2701 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2693 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2702 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2694 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2703 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2695 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2704 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2696 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2705 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2697
2706
2698 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
2707 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
2699 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
2708 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
2700 parent_group = relationship('RepoGroup', remote_side=group_id)
2709 parent_group = relationship('RepoGroup', remote_side=group_id)
2701 user = relationship('User', back_populates='repository_groups')
2710 user = relationship('User', back_populates='repository_groups')
2702 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')
2711 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')
2703
2712
2704 # no cascade, set NULL
2713 # no cascade, set NULL
2705 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)
2714 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)
2706
2715
2707 def __init__(self, group_name='', parent_group=None):
2716 def __init__(self, group_name='', parent_group=None):
2708 self.group_name = group_name
2717 self.group_name = group_name
2709 self.parent_group = parent_group
2718 self.parent_group = parent_group
2710
2719
2711 def __repr__(self):
2720 def __repr__(self):
2712 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2721 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2713
2722
2714 @hybrid_property
2723 @hybrid_property
2715 def group_name(self):
2724 def group_name(self):
2716 return self._group_name
2725 return self._group_name
2717
2726
2718 @group_name.setter
2727 @group_name.setter
2719 def group_name(self, value):
2728 def group_name(self, value):
2720 self._group_name = value
2729 self._group_name = value
2721 self.group_name_hash = self.hash_repo_group_name(value)
2730 self.group_name_hash = self.hash_repo_group_name(value)
2722
2731
2723 @classmethod
2732 @classmethod
2724 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2733 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2725 from rhodecode.lib.vcs.backends.base import EmptyCommit
2734 from rhodecode.lib.vcs.backends.base import EmptyCommit
2726 dummy = EmptyCommit().__json__()
2735 dummy = EmptyCommit().__json__()
2727 if not changeset_cache_raw:
2736 if not changeset_cache_raw:
2728 dummy['source_repo_id'] = repo_id
2737 dummy['source_repo_id'] = repo_id
2729 return json.loads(json.dumps(dummy))
2738 return json.loads(json.dumps(dummy))
2730
2739
2731 try:
2740 try:
2732 return json.loads(changeset_cache_raw)
2741 return json.loads(changeset_cache_raw)
2733 except TypeError:
2742 except TypeError:
2734 return dummy
2743 return dummy
2735 except Exception:
2744 except Exception:
2736 log.error(traceback.format_exc())
2745 log.error(traceback.format_exc())
2737 return dummy
2746 return dummy
2738
2747
2739 @hybrid_property
2748 @hybrid_property
2740 def changeset_cache(self):
2749 def changeset_cache(self):
2741 return self._load_changeset_cache('', self._changeset_cache)
2750 return self._load_changeset_cache('', self._changeset_cache)
2742
2751
2743 @changeset_cache.setter
2752 @changeset_cache.setter
2744 def changeset_cache(self, val):
2753 def changeset_cache(self, val):
2745 try:
2754 try:
2746 self._changeset_cache = json.dumps(val)
2755 self._changeset_cache = json.dumps(val)
2747 except Exception:
2756 except Exception:
2748 log.error(traceback.format_exc())
2757 log.error(traceback.format_exc())
2749
2758
2750 @validates('group_parent_id')
2759 @validates('group_parent_id')
2751 def validate_group_parent_id(self, key, val):
2760 def validate_group_parent_id(self, key, val):
2752 """
2761 """
2753 Check cycle references for a parent group to self
2762 Check cycle references for a parent group to self
2754 """
2763 """
2755 if self.group_id and val:
2764 if self.group_id and val:
2756 assert val != self.group_id
2765 assert val != self.group_id
2757
2766
2758 return val
2767 return val
2759
2768
2760 @hybrid_property
2769 @hybrid_property
2761 def description_safe(self):
2770 def description_safe(self):
2762 from rhodecode.lib import helpers as h
2771 from rhodecode.lib import helpers as h
2763 return h.escape(self.group_description)
2772 return h.escape(self.group_description)
2764
2773
2765 @classmethod
2774 @classmethod
2766 def hash_repo_group_name(cls, repo_group_name):
2775 def hash_repo_group_name(cls, repo_group_name):
2767 val = remove_formatting(repo_group_name)
2776 val = remove_formatting(repo_group_name)
2768 val = safe_str(val).lower()
2777 val = safe_str(val).lower()
2769 chars = []
2778 chars = []
2770 for c in val:
2779 for c in val:
2771 if c not in string.ascii_letters:
2780 if c not in string.ascii_letters:
2772 c = str(ord(c))
2781 c = str(ord(c))
2773 chars.append(c)
2782 chars.append(c)
2774
2783
2775 return ''.join(chars)
2784 return ''.join(chars)
2776
2785
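hash_repo_group_name() keeps ASCII letters and replaces every other character with its ordinal value. A trimmed standalone sketch, which skips the remove_formatting()/safe_str() normalization used by the real method, is below.

# Trimmed standalone sketch of hash_repo_group_name(): lowercase the name, keep
# ASCII letters, replace everything else with its ordinal value (skips the
# remove_formatting()/safe_str() steps of the real method).
import string

def hash_repo_group_name(repo_group_name):
    val = repo_group_name.lower()
    return ''.join(c if c in string.ascii_letters else str(ord(c)) for c in val)

print(hash_repo_group_name('Web/Apps 2024'))  # 'web47apps3250485052'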
    @classmethod
    def _generate_choice(cls, repo_group):
        from webhelpers2.html import literal as _literal

        def _name(k):
            return _literal(cls.CHOICES_SEPARATOR.join(k))

        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, '-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return gr.scalar()

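    # Illustrative note (not part of the original source): with cache=True the
    # lookup above is memoized in the 'sql_cache_short' region under a key
    # derived from the hashed group name, e.g.
    #     gr = RepoGroup.get_by_group_name('web/apps', cache=True)
    # subsequent calls with the same name are served from that cache until the
    # region expires or is invalidated.
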
    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit=10):
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursively return all groups, with the repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        Returns the new full group name based on the parent and the new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

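    # Illustrative example (not part of the original source), assuming URL_SEP
    # is '/': for a group whose parent full path is 'web/apps',
    #     group.get_new_name('tools')  # -> 'web/apps/tools'
    # and for a top-level group (no parent) it simply returns 'tools'.
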
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)

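    # Illustrative note (not part of the original source): a typical call is
    #     repo_group.update_commit_cache()
    # which walks the direct child repositories and child groups above, keeps
    # the changeset cache with the newest 'date', stamps 'updated_on' and
    # commits the session.
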
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also the owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners, admins and permissions. We re-write the objects coming
        # from sqlalchemy into plain AttributeDict copies because the
        # sqlalchemy session holds a global reference, and changing one object
        # would propagate to all others. E.g. if an admin is also the owner,
        # setting admin_row would otherwise change both objects.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin, write, read, none permissions, sorted again alphabetically
        # within each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

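    # Illustrative note (not part of the original source): permissions() above
    # returns AttributeDict rows ordered super-admins, owner, explicit user
    # permissions, then expanded user-group members; each row carries the user
    # fields plus a 'permission' value such as 'group.admin', and marker flags
    # like owner_row, admin_row or duplicate_perm where applicable.
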
    def permission_user_groups(self, with_members=False):
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses the `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        result.pop('_changeset_cache', '')
        return result


class Permission(Base, BaseModel):
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need the same value as before, so forced push
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important;
    # the higher the number, the more important the permission.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

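    # Illustrative note (not part of the original source): these weights are
    # meant for reducing permissions from several sources to a single one,
    # e.g. a user granted both 'group.read' (weight 1) and 'group.write'
    # (weight 3) would resolve to 'group.write'; the actual resolution is
    # presumed to happen in the permission-calculation code elsewhere.
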
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __repr__(self):
        return "<%s('%s:%s')>" % (
            self.cls_name, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()


class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates="repo_to_perm")
    repository = relationship('Repository', back_populates="repo_to_perm")
    permission = relationship('Permission')

    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.repository} >'


class UserUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_group_to_perm')
    user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.user_group} >'


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_perms')
    permission = relationship('Permission', lazy='joined')

    def __repr__(self):
        return f'<{self.user} => {self.permission} >'


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
    permission = relationship('Permission')
    repository = relationship('Repository', back_populates='users_group_to_perm')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'


class UserGroupUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'


class UserGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_to_perm')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='repo_group_to_perm')
    group = relationship('RepoGroup', back_populates='repo_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n


class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
3602 permission = relationship('Permission')
3611 permission = relationship('Permission')
3603 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3612 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3604
3613
3605 @classmethod
3614 @classmethod
3606 def create(cls, user_group, repository_group, permission):
3615 def create(cls, user_group, repository_group, permission):
3607 n = cls()
3616 n = cls()
3608 n.users_group = user_group
3617 n.users_group = user_group
3609 n.group = repository_group
3618 n.group = repository_group
3610 n.permission = permission
3619 n.permission = permission
3611 Session().add(n)
3620 Session().add(n)
3612 return n
3621 return n
3613
3622
3614 def __repr__(self):
3623 def __repr__(self):
3615 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3624 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3616
3625
3617
3626
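Editor's note: the create() helpers above all follow the same shape: build the association row, wire the relationships, stage it on the session, and leave committing to the caller. A minimal usage sketch (illustration only, not part of this changeset; it assumes already-loaded user_group, repo_group and permission rows and a configured Session):

grant = UserGroupRepoGroupToPerm.create(user_group, repo_group, permission)
Session().commit()  # create() only stages the row; the caller decides when to commit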
3618 class Statistics(Base, BaseModel):
3627 class Statistics(Base, BaseModel):
3619 __tablename__ = 'statistics'
3628 __tablename__ = 'statistics'
3620 __table_args__ = (
3629 __table_args__ = (
3621 base_table_args
3630 base_table_args
3622 )
3631 )
3623
3632
3624 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3633 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3625 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3634 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3626 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3635 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3627 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False) #JSON data
3636 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False) #JSON data
3628 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False) #JSON data
3637 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False) #JSON data
3629 languages = Column("languages", LargeBinary(1000000), nullable=False) #JSON data
3638 languages = Column("languages", LargeBinary(1000000), nullable=False) #JSON data
3630
3639
3631 repository = relationship('Repository', single_parent=True, viewonly=True)
3640 repository = relationship('Repository', single_parent=True, viewonly=True)
3632
3641
3633
3642
3634 class UserFollowing(Base, BaseModel):
3643 class UserFollowing(Base, BaseModel):
3635 __tablename__ = 'user_followings'
3644 __tablename__ = 'user_followings'
3636 __table_args__ = (
3645 __table_args__ = (
3637 UniqueConstraint('user_id', 'follows_repository_id'),
3646 UniqueConstraint('user_id', 'follows_repository_id'),
3638 UniqueConstraint('user_id', 'follows_user_id'),
3647 UniqueConstraint('user_id', 'follows_user_id'),
3639 base_table_args
3648 base_table_args
3640 )
3649 )
3641
3650
3642 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3651 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3643 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3652 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3644 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3653 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3645 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3654 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3646 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3655 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3647
3656
3648 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')
3657 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')
3649
3658
3650 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3659 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3651 follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')
3660 follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')
3652
3661
3653 @classmethod
3662 @classmethod
3654 def get_repo_followers(cls, repo_id):
3663 def get_repo_followers(cls, repo_id):
3655 return cls.query().filter(cls.follows_repo_id == repo_id)
3664 return cls.query().filter(cls.follows_repo_id == repo_id)
3656
3665
3657
3666
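Editor's note: get_repo_followers() above returns an unexecuted query, so callers can refine or count it before touching the database. A hedged sketch (illustration only; repo is assumed to be a loaded Repository row, and the username attribute on User is assumed here):

followers_q = UserFollowing.get_repo_followers(repo.repo_id)
print(followers_q.count())
names = [f.user.username for f in followers_q.all()]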
3658 class CacheKey(Base, BaseModel):
3667 class CacheKey(Base, BaseModel):
3659 __tablename__ = 'cache_invalidation'
3668 __tablename__ = 'cache_invalidation'
3660 __table_args__ = (
3669 __table_args__ = (
3661 UniqueConstraint('cache_key'),
3670 UniqueConstraint('cache_key'),
3662 Index('key_idx', 'cache_key'),
3671 Index('key_idx', 'cache_key'),
3663 Index('cache_args_idx', 'cache_args'),
3672 Index('cache_args_idx', 'cache_args'),
3664 base_table_args,
3673 base_table_args,
3665 )
3674 )
3666
3675
3667 CACHE_TYPE_FEED = 'FEED'
3676 CACHE_TYPE_FEED = 'FEED'
3668
3677
3669 # namespaces used to register process/thread aware caches
3678 # namespaces used to register process/thread aware caches
3670 REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'
3679 REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'
3671
3680
3672 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3681 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3673 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3682 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3674 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3683 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3675 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3684 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3676 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3685 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3677
3686
3678 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3687 def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
3679 self.cache_key = cache_key
3688 self.cache_key = cache_key
3680 self.cache_args = cache_args
3689 self.cache_args = cache_args
3681 self.cache_active = False
3690 self.cache_active = cache_active
3682 # first key should be same for all entries, since all workers should share it
3691 # first key should be same for all entries, since all workers should share it
3683 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3692 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3684
3693
3685 def __repr__(self):
3694 def __repr__(self):
3686 return "<%s('%s:%s[%s]')>" % (
3695 return "<%s('%s:%s[%s]')>" % (
3687 self.cls_name,
3696 self.cls_name,
3688 self.cache_id, self.cache_key, self.cache_active)
3697 self.cache_id, self.cache_key, self.cache_active)
3689
3698
3690 def _cache_key_partition(self):
3699 def _cache_key_partition(self):
3691 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3700 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3692 return prefix, repo_name, suffix
3701 return prefix, repo_name, suffix
3693
3702
3694 def get_prefix(self):
3703 def get_prefix(self):
3695 """
3704 """
3696 Try to extract the prefix from an existing cache key. The key could consist
3705 Try to extract the prefix from an existing cache key. The key could consist
3697 of prefix, repo_name and suffix
3706 of prefix, repo_name and suffix
3698 """
3707 """
3699 # this returns prefix, repo_name, suffix
3708 # this returns prefix, repo_name, suffix
3700 return self._cache_key_partition()[0]
3709 return self._cache_key_partition()[0]
3701
3710
3702 def get_suffix(self):
3711 def get_suffix(self):
3703 """
3712 """
3704 get suffix that might have been used in _get_cache_key to
3713 get suffix that might have been used in _get_cache_key to
3705 generate self.cache_key. Only used for informational purposes
3714 generate self.cache_key. Only used for informational purposes
3706 in repo_edit.mako.
3715 in repo_edit.mako.
3707 """
3716 """
3708 # prefix, repo_name, suffix
3717 # prefix, repo_name, suffix
3709 return self._cache_key_partition()[2]
3718 return self._cache_key_partition()[2]
3710
3719
3711 @classmethod
3720 @classmethod
3712 def generate_new_state_uid(cls, based_on=None):
3721 def generate_new_state_uid(cls, based_on=None):
3713 if based_on:
3722 if based_on:
3714 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3723 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3715 else:
3724 else:
3716 return str(uuid.uuid4())
3725 return str(uuid.uuid4())
3717
3726
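Editor's note: generate_new_state_uid() above is deterministic when based_on is given (uuid5 over the URL namespace) and random otherwise. A small runnable check of that behaviour (editor's sketch, sample namespace string is made up):

import uuid
a = str(uuid.uuid5(uuid.NAMESPACE_URL, 'repo_cache.v1:42'))
b = str(uuid.uuid5(uuid.NAMESPACE_URL, 'repo_cache.v1:42'))
print(a == b)                  # True: same based_on, same uid
print(str(uuid.uuid4()) == a)  # False: fresh random uid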
3718 @classmethod
3727 @classmethod
3719 def delete_all_cache(cls):
3728 def delete_all_cache(cls):
3720 """
3729 """
3721 Delete all cache keys from database.
3730 Delete all cache keys from database.
3722 Should only be run when all instances are down and all entries are
3731 Should only be run when all instances are down and all entries are
3723 therefore stale.
3732 therefore stale.
3724 """
3733 """
3725 cls.query().delete()
3734 cls.query().delete()
3726 Session().commit()
3735 Session().commit()
3727
3736
3728 @classmethod
3737 @classmethod
3729 def set_invalidate(cls, cache_uid, delete=False):
3738 def set_invalidate(cls, cache_uid, delete=False):
3730 """
3739 """
3731 Mark all caches of a repo as invalid in the database.
3740 Mark all caches of a repo as invalid in the database.
3732 """
3741 """
3733
3734 try:
3742 try:
3735 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3743 qry = Session().query(cls).filter(cls.cache_key == cache_uid)
3736 if delete:
3744 if delete:
3737 qry.delete()
3745 qry.delete()
3738 log.debug('cache objects deleted for cache args %s',
3746 log.debug('cache objects deleted for cache args %s',
3739 safe_str(cache_uid))
3747 safe_str(cache_uid))
3740 else:
3748 else:
3741 qry.update({"cache_active": False,
3749 new_uid = cls.generate_new_state_uid()
3742 "cache_state_uid": cls.generate_new_state_uid()})
3750 qry.update({"cache_state_uid": new_uid,
3743 log.debug('cache objects marked as invalid for cache args %s',
3751 "cache_args": f"repo_state:{time.time()}"})
3744 safe_str(cache_uid))
3752 log.debug('cache object %s set new UID %s',
3753 safe_str(cache_uid), new_uid)
3745
3754
3746 Session().commit()
3755 Session().commit()
3747 except Exception:
3756 except Exception:
3748 log.exception(
3757 log.exception(
3749 'Cache key invalidation failed for cache args %s',
3758 'Cache key invalidation failed for cache args %s',
3750 safe_str(cache_uid))
3759 safe_str(cache_uid))
3751 Session().rollback()
3760 Session().rollback()
3752
3761
3753 @classmethod
3762 @classmethod
3754 def get_active_cache(cls, cache_key):
3763 def get_active_cache(cls, cache_key):
3755 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3764 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3756 if inv_obj:
3765 if inv_obj:
3757 return inv_obj
3766 return inv_obj
3758 return None
3767 return None
3759
3768
3760 @classmethod
3769 @classmethod
3761 def get_namespace_map(cls, namespace):
3770 def get_namespace_map(cls, namespace):
3762 return {
3771 return {
3763 x.cache_key: x
3772 x.cache_key: x
3764 for x in cls.query().filter(cls.cache_args == namespace)}
3773 for x in cls.query().filter(cls.cache_args == namespace)}
3765
3774
3766
3775
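Editor's note: the CacheKey helpers above form a per-namespace invalidation record: rather than touching cached data directly, set_invalidate() rotates cache_state_uid so any consumer that embeds the old uid in its cache key misses on the next read. A hedged sketch of that flow (illustration only, not code from this diff; repo_id 42 is made up):

namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=42)
entry = CacheKey.get_active_cache(namespace)
if entry is None:
    entry = CacheKey(namespace, cache_args=namespace)
    Session().add(entry)
    Session().commit()
# later, e.g. after a push: rotate the state uid instead of deleting cached data
CacheKey.set_invalidate(namespace, delete=False)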
3767 class ChangesetComment(Base, BaseModel):
3776 class ChangesetComment(Base, BaseModel):
3768 __tablename__ = 'changeset_comments'
3777 __tablename__ = 'changeset_comments'
3769 __table_args__ = (
3778 __table_args__ = (
3770 Index('cc_revision_idx', 'revision'),
3779 Index('cc_revision_idx', 'revision'),
3771 base_table_args,
3780 base_table_args,
3772 )
3781 )
3773
3782
3774 COMMENT_OUTDATED = 'comment_outdated'
3783 COMMENT_OUTDATED = 'comment_outdated'
3775 COMMENT_TYPE_NOTE = 'note'
3784 COMMENT_TYPE_NOTE = 'note'
3776 COMMENT_TYPE_TODO = 'todo'
3785 COMMENT_TYPE_TODO = 'todo'
3777 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3786 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3778
3787
3779 OP_IMMUTABLE = 'immutable'
3788 OP_IMMUTABLE = 'immutable'
3780 OP_CHANGEABLE = 'changeable'
3789 OP_CHANGEABLE = 'changeable'
3781
3790
3782 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3791 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3783 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3792 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3784 revision = Column('revision', String(40), nullable=True)
3793 revision = Column('revision', String(40), nullable=True)
3785 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3794 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3786 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3795 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3787 line_no = Column('line_no', Unicode(10), nullable=True)
3796 line_no = Column('line_no', Unicode(10), nullable=True)
3788 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3797 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3789 f_path = Column('f_path', Unicode(1000), nullable=True)
3798 f_path = Column('f_path', Unicode(1000), nullable=True)
3790 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3799 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3791 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3800 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3792 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3801 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3793 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3802 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3794 renderer = Column('renderer', Unicode(64), nullable=True)
3803 renderer = Column('renderer', Unicode(64), nullable=True)
3795 display_state = Column('display_state', Unicode(128), nullable=True)
3804 display_state = Column('display_state', Unicode(128), nullable=True)
3796 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3805 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3797 draft = Column('draft', Boolean(), nullable=True, default=False)
3806 draft = Column('draft', Boolean(), nullable=True, default=False)
3798
3807
3799 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3808 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3800 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3809 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3801
3810
3802 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3811 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3803 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3812 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3804
3813
3805 author = relationship('User', lazy='select', back_populates='user_comments')
3814 author = relationship('User', lazy='select', back_populates='user_comments')
3806 repo = relationship('Repository', back_populates='comments')
3815 repo = relationship('Repository', back_populates='comments')
3807 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
3816 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
3808 pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
3817 pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
3809 pull_request_version = relationship('PullRequestVersion', lazy='select')
3818 pull_request_version = relationship('PullRequestVersion', lazy='select')
3810 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")
3819 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")
3811
3820
3812 @classmethod
3821 @classmethod
3813 def get_users(cls, revision=None, pull_request_id=None):
3822 def get_users(cls, revision=None, pull_request_id=None):
3814 """
3823 """
3815 Returns users associated with this ChangesetComment, i.e. those
3824 Returns users associated with this ChangesetComment, i.e. those
3816 who actually commented.
3825 who actually commented.
3817
3826
3818 :param cls:
3827 :param cls:
3819 :param revision:
3828 :param revision:
3820 """
3829 """
3821 q = Session().query(User).join(ChangesetComment.author)
3830 q = Session().query(User).join(ChangesetComment.author)
3822 if revision:
3831 if revision:
3823 q = q.filter(cls.revision == revision)
3832 q = q.filter(cls.revision == revision)
3824 elif pull_request_id:
3833 elif pull_request_id:
3825 q = q.filter(cls.pull_request_id == pull_request_id)
3834 q = q.filter(cls.pull_request_id == pull_request_id)
3826 return q.all()
3835 return q.all()
3827
3836
3828 @classmethod
3837 @classmethod
3829 def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
3838 def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
3830 if pr_version is None:
3839 if pr_version is None:
3831 return 0
3840 return 0
3832
3841
3833 if versions is not None:
3842 if versions is not None:
3834 num_versions = [x.pull_request_version_id for x in versions]
3843 num_versions = [x.pull_request_version_id for x in versions]
3835
3844
3836 num_versions = num_versions or []
3845 num_versions = num_versions or []
3837 try:
3846 try:
3838 return num_versions.index(pr_version) + 1
3847 return num_versions.index(pr_version) + 1
3839 except (IndexError, ValueError):
3848 except (IndexError, ValueError):
3840 return 0
3849 return 0
3841
3850
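Editor's note: get_index_from_version() above maps a version id to its 1-based position in the list of known versions, falling back to 0 for None or unknown ids. A hypothetical example with made-up ids (editor's sketch, runnable on its own):

version_ids = [11, 14, 19]     # pull_request_version_id values, oldest first
pr_version = 14
try:
    index = version_ids.index(pr_version) + 1
except ValueError:
    index = 0
print(index)   # 2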
3842 @property
3851 @property
3843 def outdated(self):
3852 def outdated(self):
3844 return self.display_state == self.COMMENT_OUTDATED
3853 return self.display_state == self.COMMENT_OUTDATED
3845
3854
3846 @property
3855 @property
3847 def outdated_js(self):
3856 def outdated_js(self):
3848 return str_json(self.display_state == self.COMMENT_OUTDATED)
3857 return str_json(self.display_state == self.COMMENT_OUTDATED)
3849
3858
3850 @property
3859 @property
3851 def immutable(self):
3860 def immutable(self):
3852 return self.immutable_state == self.OP_IMMUTABLE
3861 return self.immutable_state == self.OP_IMMUTABLE
3853
3862
3854 def outdated_at_version(self, version: int) -> bool:
3863 def outdated_at_version(self, version: int) -> bool:
3855 """
3864 """
3856 Checks if comment is outdated for given pull request version
3865 Checks if comment is outdated for given pull request version
3857 """
3866 """
3858
3867
3859 def version_check():
3868 def version_check():
3860 return self.pull_request_version_id and self.pull_request_version_id != version
3869 return self.pull_request_version_id and self.pull_request_version_id != version
3861
3870
3862 if self.is_inline:
3871 if self.is_inline:
3863 return self.outdated and version_check()
3872 return self.outdated and version_check()
3864 else:
3873 else:
3865 # general comments don't have .outdated set; the latest version also has no version id
3874 # general comments don't have .outdated set; the latest version also has no version id
3866 return version_check()
3875 return version_check()
3867
3876
3868 def outdated_at_version_js(self, version):
3877 def outdated_at_version_js(self, version):
3869 """
3878 """
3870 Checks if comment is outdated for given pull request version
3879 Checks if comment is outdated for given pull request version
3871 """
3880 """
3872 return str_json(self.outdated_at_version(version))
3881 return str_json(self.outdated_at_version(version))
3873
3882
3874 def older_than_version(self, version: int) -> bool:
3883 def older_than_version(self, version: int) -> bool:
3875 """
3884 """
3876 Checks if the comment was made in an earlier version than the given one.
3885 Checks if the comment was made in an earlier version than the given one.
3877 Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
3886 Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
3878 """
3887 """
3879
3888
3880 # If version is None, return False as the current version cannot be less than None
3889 # If version is None, return False as the current version cannot be less than None
3881 if version is None:
3890 if version is None:
3882 return False
3891 return False
3883
3892
3884 # Ensure that the version is an integer to prevent TypeError on comparison
3893 # Ensure that the version is an integer to prevent TypeError on comparison
3885 if not isinstance(version, int):
3894 if not isinstance(version, int):
3886 raise ValueError("The provided version must be an integer.")
3895 raise ValueError("The provided version must be an integer.")
3887
3896
3888 # Initialize current version to 0 or pull_request_version_id if it's available
3897 # Initialize current version to 0 or pull_request_version_id if it's available
3889 cur_ver = 0
3898 cur_ver = 0
3890 if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
3899 if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
3891 cur_ver = self.pull_request_version.pull_request_version_id
3900 cur_ver = self.pull_request_version.pull_request_version_id
3892
3901
3893 # Return True if the current version is less than the given version
3902 # Return True if the current version is less than the given version
3894 return cur_ver < version
3903 return cur_ver < version
3895
3904
3896 def older_than_version_js(self, version):
3905 def older_than_version_js(self, version):
3897 """
3906 """
3898 Checks if the comment was made in an earlier version than the given one
3907 Checks if the comment was made in an earlier version than the given one
3899 """
3908 """
3900 return str_json(self.older_than_version(version))
3909 return str_json(self.older_than_version(version))
3901
3910
3902 @property
3911 @property
3903 def commit_id(self):
3912 def commit_id(self):
3904 """New style naming to stop using .revision"""
3913 """New style naming to stop using .revision"""
3905 return self.revision
3914 return self.revision
3906
3915
3907 @property
3916 @property
3908 def resolved(self):
3917 def resolved(self):
3909 return self.resolved_by[0] if self.resolved_by else None
3918 return self.resolved_by[0] if self.resolved_by else None
3910
3919
3911 @property
3920 @property
3912 def is_todo(self):
3921 def is_todo(self):
3913 return self.comment_type == self.COMMENT_TYPE_TODO
3922 return self.comment_type == self.COMMENT_TYPE_TODO
3914
3923
3915 @property
3924 @property
3916 def is_inline(self):
3925 def is_inline(self):
3917 if self.line_no and self.f_path:
3926 if self.line_no and self.f_path:
3918 return True
3927 return True
3919 return False
3928 return False
3920
3929
3921 @property
3930 @property
3922 def last_version(self):
3931 def last_version(self):
3923 version = 0
3932 version = 0
3924 if self.history:
3933 if self.history:
3925 version = self.history[-1].version
3934 version = self.history[-1].version
3926 return version
3935 return version
3927
3936
3928 def get_index_version(self, versions):
3937 def get_index_version(self, versions):
3929 return self.get_index_from_version(
3938 return self.get_index_from_version(
3930 self.pull_request_version_id, versions)
3939 self.pull_request_version_id, versions)
3931
3940
3932 @property
3941 @property
3933 def review_status(self):
3942 def review_status(self):
3934 if self.status_change:
3943 if self.status_change:
3935 return self.status_change[0].status
3944 return self.status_change[0].status
3936
3945
3937 @property
3946 @property
3938 def review_status_lbl(self):
3947 def review_status_lbl(self):
3939 if self.status_change:
3948 if self.status_change:
3940 return self.status_change[0].status_lbl
3949 return self.status_change[0].status_lbl
3941
3950
3942 def __repr__(self):
3951 def __repr__(self):
3943 if self.comment_id:
3952 if self.comment_id:
3944 return f'<DB:Comment #{self.comment_id}>'
3953 return f'<DB:Comment #{self.comment_id}>'
3945 else:
3954 else:
3946 return f'<DB:Comment at {id(self)!r}>'
3955 return f'<DB:Comment at {id(self)!r}>'
3947
3956
3948 def get_api_data(self):
3957 def get_api_data(self):
3949 comment = self
3958 comment = self
3950
3959
3951 data = {
3960 data = {
3952 'comment_id': comment.comment_id,
3961 'comment_id': comment.comment_id,
3953 'comment_type': comment.comment_type,
3962 'comment_type': comment.comment_type,
3954 'comment_text': comment.text,
3963 'comment_text': comment.text,
3955 'comment_status': comment.status_change,
3964 'comment_status': comment.status_change,
3956 'comment_f_path': comment.f_path,
3965 'comment_f_path': comment.f_path,
3957 'comment_lineno': comment.line_no,
3966 'comment_lineno': comment.line_no,
3958 'comment_author': comment.author,
3967 'comment_author': comment.author,
3959 'comment_created_on': comment.created_on,
3968 'comment_created_on': comment.created_on,
3960 'comment_resolved_by': self.resolved,
3969 'comment_resolved_by': self.resolved,
3961 'comment_commit_id': comment.revision,
3970 'comment_commit_id': comment.revision,
3962 'comment_pull_request_id': comment.pull_request_id,
3971 'comment_pull_request_id': comment.pull_request_id,
3963 'comment_last_version': self.last_version
3972 'comment_last_version': self.last_version
3964 }
3973 }
3965 return data
3974 return data
3966
3975
3967 def __json__(self):
3976 def __json__(self):
3968 data = dict()
3977 data = dict()
3969 data.update(self.get_api_data())
3978 data.update(self.get_api_data())
3970 return data
3979 return data
3971
3980
3972
3981
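Editor's note: the outdated/older-than checks above reduce to plain integer bookkeeping once the ORM is stripped away. A standalone illustration with made-up numbers (editor's sketch, runnable on its own):

comment_version = 2        # pull_request_version_id the inline comment was left on
viewing_version = 3        # version currently being displayed
display_state_outdated = True
is_inline = True

version_differs = comment_version is not None and comment_version != viewing_version
outdated_here = (display_state_outdated and version_differs) if is_inline else version_differs
older = (comment_version or 0) < viewing_version
print(outdated_here, older)   # True True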
3973 class ChangesetCommentHistory(Base, BaseModel):
3982 class ChangesetCommentHistory(Base, BaseModel):
3974 __tablename__ = 'changeset_comments_history'
3983 __tablename__ = 'changeset_comments_history'
3975 __table_args__ = (
3984 __table_args__ = (
3976 Index('cch_comment_id_idx', 'comment_id'),
3985 Index('cch_comment_id_idx', 'comment_id'),
3977 base_table_args,
3986 base_table_args,
3978 )
3987 )
3979
3988
3980 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3989 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3981 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3990 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3982 version = Column("version", Integer(), nullable=False, default=0)
3991 version = Column("version", Integer(), nullable=False, default=0)
3983 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3992 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3984 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3993 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3985 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3994 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3986 deleted = Column('deleted', Boolean(), default=False)
3995 deleted = Column('deleted', Boolean(), default=False)
3987
3996
3988 author = relationship('User', lazy='joined')
3997 author = relationship('User', lazy='joined')
3989 comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")
3998 comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")
3990
3999
3991 @classmethod
4000 @classmethod
3992 def get_version(cls, comment_id):
4001 def get_version(cls, comment_id):
3993 q = Session().query(ChangesetCommentHistory).filter(
4002 q = Session().query(ChangesetCommentHistory).filter(
3994 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
4003 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
3995 if q.count() == 0:
4004 if q.count() == 0:
3996 return 1
4005 return 1
3997 elif q.count() >= q[0].version:
4006 elif q.count() >= q[0].version:
3998 return q.count() + 1
4007 return q.count() + 1
3999 else:
4008 else:
4000 return q[0].version + 1
4009 return q[0].version + 1
4001
4010
4002
4011
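Editor's note: get_version() above picks the next history version from whichever is larger, the row count or the newest stored version. A hypothetical walk-through with made-up data (editor's sketch, runnable on its own):

existing_versions = [3, 2, 1]          # versions ordered newest first, as the query does
if not existing_versions:
    next_version = 1
elif len(existing_versions) >= existing_versions[0]:
    next_version = len(existing_versions) + 1
else:
    next_version = existing_versions[0] + 1
print(next_version)   # 4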
4003 class ChangesetStatus(Base, BaseModel):
4012 class ChangesetStatus(Base, BaseModel):
4004 __tablename__ = 'changeset_statuses'
4013 __tablename__ = 'changeset_statuses'
4005 __table_args__ = (
4014 __table_args__ = (
4006 Index('cs_revision_idx', 'revision'),
4015 Index('cs_revision_idx', 'revision'),
4007 Index('cs_version_idx', 'version'),
4016 Index('cs_version_idx', 'version'),
4008 UniqueConstraint('repo_id', 'revision', 'version'),
4017 UniqueConstraint('repo_id', 'revision', 'version'),
4009 base_table_args
4018 base_table_args
4010 )
4019 )
4011
4020
4012 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
4021 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
4013 STATUS_APPROVED = 'approved'
4022 STATUS_APPROVED = 'approved'
4014 STATUS_REJECTED = 'rejected'
4023 STATUS_REJECTED = 'rejected'
4015 STATUS_UNDER_REVIEW = 'under_review'
4024 STATUS_UNDER_REVIEW = 'under_review'
4016
4025
4017 STATUSES = [
4026 STATUSES = [
4018 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
4027 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
4019 (STATUS_APPROVED, _("Approved")),
4028 (STATUS_APPROVED, _("Approved")),
4020 (STATUS_REJECTED, _("Rejected")),
4029 (STATUS_REJECTED, _("Rejected")),
4021 (STATUS_UNDER_REVIEW, _("Under Review")),
4030 (STATUS_UNDER_REVIEW, _("Under Review")),
4022 ]
4031 ]
4023
4032
4024 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4033 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4025 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4034 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4026 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4035 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4027 revision = Column('revision', String(40), nullable=False)
4036 revision = Column('revision', String(40), nullable=False)
4028 status = Column('status', String(128), nullable=False, default=DEFAULT)
4037 status = Column('status', String(128), nullable=False, default=DEFAULT)
4029 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4038 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4030 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4039 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4031 version = Column('version', Integer(), nullable=False, default=0)
4040 version = Column('version', Integer(), nullable=False, default=0)
4032 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4041 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4033
4042
4034 author = relationship('User', lazy='select')
4043 author = relationship('User', lazy='select')
4035 repo = relationship('Repository', lazy='select')
4044 repo = relationship('Repository', lazy='select')
4036 comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
4045 comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
4037 pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')
4046 pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')
4038
4047
4039 def __repr__(self):
4048 def __repr__(self):
4040 return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"
4049 return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"
4041
4050
4042 @classmethod
4051 @classmethod
4043 def get_status_lbl(cls, value):
4052 def get_status_lbl(cls, value):
4044 return dict(cls.STATUSES).get(value)
4053 return dict(cls.STATUSES).get(value)
4045
4054
4046 @property
4055 @property
4047 def status_lbl(self):
4056 def status_lbl(self):
4048 return ChangesetStatus.get_status_lbl(self.status)
4057 return ChangesetStatus.get_status_lbl(self.status)
4049
4058
4050 def get_api_data(self):
4059 def get_api_data(self):
4051 status = self
4060 status = self
4052 data = {
4061 data = {
4053 'status_id': status.changeset_status_id,
4062 'status_id': status.changeset_status_id,
4054 'status': status.status,
4063 'status': status.status,
4055 }
4064 }
4056 return data
4065 return data
4057
4066
4058 def __json__(self):
4067 def __json__(self):
4059 data = dict()
4068 data = dict()
4060 data.update(self.get_api_data())
4069 data.update(self.get_api_data())
4061 return data
4070 return data
4062
4071
4063
4072
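Editor's note: get_status_lbl() above is a plain lookup into the STATUSES pairs; unknown values fall through to None. A tiny runnable equivalent using the same keys (editor's sketch; labels shown without the translation wrapper):

statuses = dict([('not_reviewed', 'Not Reviewed'), ('approved', 'Approved'),
                 ('rejected', 'Rejected'), ('under_review', 'Under Review')])
print(statuses.get('approved'))     # Approved
print(statuses.get('bogus'))        # None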
4064 class _SetState(object):
4073 class _SetState(object):
4065 """
4074 """
4066 Context manager allowing changing the state for sensitive operations such as
4075 Context manager allowing changing the state for sensitive operations such as
4067 a pull request update or merge
4076 a pull request update or merge
4068 """
4077 """
4069
4078
4070 def __init__(self, pull_request, pr_state, back_state=None):
4079 def __init__(self, pull_request, pr_state, back_state=None):
4071 self._pr = pull_request
4080 self._pr = pull_request
4072 self._org_state = back_state or pull_request.pull_request_state
4081 self._org_state = back_state or pull_request.pull_request_state
4073 self._pr_state = pr_state
4082 self._pr_state = pr_state
4074 self._current_state = None
4083 self._current_state = None
4075
4084
4076 def __enter__(self):
4085 def __enter__(self):
4077 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4086 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4078 self._pr, self._pr_state)
4087 self._pr, self._pr_state)
4079 self.set_pr_state(self._pr_state)
4088 self.set_pr_state(self._pr_state)
4080 return self
4089 return self
4081
4090
4082 def __exit__(self, exc_type, exc_val, exc_tb):
4091 def __exit__(self, exc_type, exc_val, exc_tb):
4083 if exc_val is not None or exc_type is not None:
4092 if exc_val is not None or exc_type is not None:
4084 log.error(traceback.format_tb(exc_tb))
4093 log.error(traceback.format_tb(exc_tb))
4085 return None
4094 return None
4086
4095
4087 self.set_pr_state(self._org_state)
4096 self.set_pr_state(self._org_state)
4088 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4097 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4089 self._pr, self._org_state)
4098 self._pr, self._org_state)
4090
4099
4091 @property
4100 @property
4092 def state(self):
4101 def state(self):
4093 return self._current_state
4102 return self._current_state
4094
4103
4095 def set_pr_state(self, pr_state):
4104 def set_pr_state(self, pr_state):
4096 try:
4105 try:
4097 self._pr.pull_request_state = pr_state
4106 self._pr.pull_request_state = pr_state
4098 Session().add(self._pr)
4107 Session().add(self._pr)
4099 Session().commit()
4108 Session().commit()
4100 self._current_state = pr_state
4109 self._current_state = pr_state
4101 except Exception:
4110 except Exception:
4102 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4111 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4103 raise
4112 raise
4104
4113
4105
4114
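Editor's note: _SetState above is meant to be used as a with-block: __enter__ persists the temporary state and __exit__ restores the original one, unless an exception escaped, in which case the state is not restored. A minimal usage sketch (illustration only; pull_request is assumed to be a loaded row):

with _SetState(pull_request, 'updating'):   # 'updating' matches STATE_UPDATING below
    ...                                     # run the sensitive update here
# afterwards pull_request.pull_request_state is back to its previous value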
4106 class _PullRequestBase(BaseModel):
4115 class _PullRequestBase(BaseModel):
4107 """
4116 """
4108 Common attributes of pull request and version entries.
4117 Common attributes of pull request and version entries.
4109 """
4118 """
4110
4119
4111 # .status values
4120 # .status values
4112 STATUS_NEW = 'new'
4121 STATUS_NEW = 'new'
4113 STATUS_OPEN = 'open'
4122 STATUS_OPEN = 'open'
4114 STATUS_CLOSED = 'closed'
4123 STATUS_CLOSED = 'closed'
4115
4124
4116 # available states
4125 # available states
4117 STATE_CREATING = 'creating'
4126 STATE_CREATING = 'creating'
4118 STATE_UPDATING = 'updating'
4127 STATE_UPDATING = 'updating'
4119 STATE_MERGING = 'merging'
4128 STATE_MERGING = 'merging'
4120 STATE_CREATED = 'created'
4129 STATE_CREATED = 'created'
4121
4130
4122 title = Column('title', Unicode(255), nullable=True)
4131 title = Column('title', Unicode(255), nullable=True)
4123 description = Column(
4132 description = Column(
4124 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4133 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4125 nullable=True)
4134 nullable=True)
4126 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4135 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4127
4136
4128 # new/open/closed status of pull request (not approve/reject/etc)
4137 # new/open/closed status of pull request (not approve/reject/etc)
4129 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4138 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4130 created_on = Column(
4139 created_on = Column(
4131 'created_on', DateTime(timezone=False), nullable=False,
4140 'created_on', DateTime(timezone=False), nullable=False,
4132 default=datetime.datetime.now)
4141 default=datetime.datetime.now)
4133 updated_on = Column(
4142 updated_on = Column(
4134 'updated_on', DateTime(timezone=False), nullable=False,
4143 'updated_on', DateTime(timezone=False), nullable=False,
4135 default=datetime.datetime.now)
4144 default=datetime.datetime.now)
4136
4145
4137 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4146 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4138
4147
4139 @declared_attr
4148 @declared_attr
4140 def user_id(cls):
4149 def user_id(cls):
4141 return Column(
4150 return Column(
4142 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4151 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4143 unique=None)
4152 unique=None)
4144
4153
4145 # 500 revisions max
4154 # 500 revisions max
4146 _revisions = Column(
4155 _revisions = Column(
4147 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4156 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4148
4157
4149 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4158 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4150
4159
4151 @declared_attr
4160 @declared_attr
4152 def source_repo_id(cls):
4161 def source_repo_id(cls):
4153 # TODO: dan: rename column to source_repo_id
4162 # TODO: dan: rename column to source_repo_id
4154 return Column(
4163 return Column(
4155 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4164 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4156 nullable=False)
4165 nullable=False)
4157
4166
4158 @declared_attr
4167 @declared_attr
4159 def pr_source(cls):
4168 def pr_source(cls):
4160 return relationship(
4169 return relationship(
4161 'Repository',
4170 'Repository',
4162 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4171 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4163 overlaps="pull_requests_source"
4172 overlaps="pull_requests_source"
4164 )
4173 )
4165
4174
4166 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4175 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4167
4176
4168 @hybrid_property
4177 @hybrid_property
4169 def source_ref(self):
4178 def source_ref(self):
4170 return self._source_ref
4179 return self._source_ref
4171
4180
4172 @source_ref.setter
4181 @source_ref.setter
4173 def source_ref(self, val):
4182 def source_ref(self, val):
4174 parts = (val or '').split(':')
4183 parts = (val or '').split(':')
4175 if len(parts) != 3:
4184 if len(parts) != 3:
4176 raise ValueError(
4185 raise ValueError(
4177 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4186 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4178 self._source_ref = safe_str(val)
4187 self._source_ref = safe_str(val)
4179
4188
4180 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4189 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4181
4190
4182 @hybrid_property
4191 @hybrid_property
4183 def target_ref(self):
4192 def target_ref(self):
4184 return self._target_ref
4193 return self._target_ref
4185
4194
4186 @target_ref.setter
4195 @target_ref.setter
4187 def target_ref(self, val):
4196 def target_ref(self, val):
4188 parts = (val or '').split(':')
4197 parts = (val or '').split(':')
4189 if len(parts) != 3:
4198 if len(parts) != 3:
4190 raise ValueError(
4199 raise ValueError(
4191 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4200 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4192 self._target_ref = safe_str(val)
4201 self._target_ref = safe_str(val)
4193
4202
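Editor's note: both ref setters above accept only the serialized 'type:name:commit_id' form and raise ValueError otherwise. A purely illustrative assignment (branch names and commit ids are made up):

pull_request.source_ref = 'branch:feature-x:1f2e3d4c5b6a'   # type:name:commit_id
pull_request.target_ref = 'branch:default:0a1b2c3d4e5f'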
4194 @declared_attr
4203 @declared_attr
4195 def target_repo_id(cls):
4204 def target_repo_id(cls):
4196 # TODO: dan: rename column to target_repo_id
4205 # TODO: dan: rename column to target_repo_id
4197 return Column(
4206 return Column(
4198 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4207 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4199 nullable=False)
4208 nullable=False)
4200
4209
4201 @declared_attr
4210 @declared_attr
4202 def pr_target(cls):
4211 def pr_target(cls):
4203 return relationship(
4212 return relationship(
4204 'Repository',
4213 'Repository',
4205 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4214 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4206 overlaps="pull_requests_target"
4215 overlaps="pull_requests_target"
4207 )
4216 )
4208
4217
4209 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4218 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4210
4219
4211 # TODO: dan: rename column to last_merge_source_rev
4220 # TODO: dan: rename column to last_merge_source_rev
4212 _last_merge_source_rev = Column(
4221 _last_merge_source_rev = Column(
4213 'last_merge_org_rev', String(40), nullable=True)
4222 'last_merge_org_rev', String(40), nullable=True)
4214 # TODO: dan: rename column to last_merge_target_rev
4223 # TODO: dan: rename column to last_merge_target_rev
4215 _last_merge_target_rev = Column(
4224 _last_merge_target_rev = Column(
4216 'last_merge_other_rev', String(40), nullable=True)
4225 'last_merge_other_rev', String(40), nullable=True)
4217 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4226 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4218 last_merge_metadata = Column(
4227 last_merge_metadata = Column(
4219 'last_merge_metadata', MutationObj.as_mutable(
4228 'last_merge_metadata', MutationObj.as_mutable(
4220 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4229 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4221
4230
4222 merge_rev = Column('merge_rev', String(40), nullable=True)
4231 merge_rev = Column('merge_rev', String(40), nullable=True)
4223
4232
4224 reviewer_data = Column(
4233 reviewer_data = Column(
4225 'reviewer_data_json', MutationObj.as_mutable(
4234 'reviewer_data_json', MutationObj.as_mutable(
4226 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4235 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4227
4236
4228 @property
4237 @property
4229 def reviewer_data_json(self):
4238 def reviewer_data_json(self):
4230 return str_json(self.reviewer_data)
4239 return str_json(self.reviewer_data)
4231
4240
4232 @property
4241 @property
4233 def last_merge_metadata_parsed(self):
4242 def last_merge_metadata_parsed(self):
4234 metadata = {}
4243 metadata = {}
4235 if not self.last_merge_metadata:
4244 if not self.last_merge_metadata:
4236 return metadata
4245 return metadata
4237
4246
4238 if hasattr(self.last_merge_metadata, 'de_coerce'):
4247 if hasattr(self.last_merge_metadata, 'de_coerce'):
4239 for k, v in self.last_merge_metadata.de_coerce().items():
4248 for k, v in self.last_merge_metadata.de_coerce().items():
4240 if k in ['target_ref', 'source_ref']:
4249 if k in ['target_ref', 'source_ref']:
4241 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4250 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4242 else:
4251 else:
4243 if hasattr(v, 'de_coerce'):
4252 if hasattr(v, 'de_coerce'):
4244 metadata[k] = v.de_coerce()
4253 metadata[k] = v.de_coerce()
4245 else:
4254 else:
4246 metadata[k] = v
4255 metadata[k] = v
4247 return metadata
4256 return metadata
4248
4257
4249 @property
4258 @property
4250 def work_in_progress(self):
4259 def work_in_progress(self):
4251 """checks if pull request is work in progress by checking the title"""
4260 """checks if pull request is work in progress by checking the title"""
4252 title = self.title.upper()
4261 title = self.title.upper()
4253 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4262 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4254 return True
4263 return True
4255 return False
4264 return False
4256
4265
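Editor's note: the WIP detection above only matches a marker at the very start of the upper-cased title. A small runnable check with made-up titles (editor's sketch):

import re
wip_re = re.compile(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)')
for title in ('[wip] fix caches', 'WIP: refactor', 'wip rework', 'Fix WIP handling'):
    print(title, bool(wip_re.match(title.upper())))
# the first three match; the last does not, because the marker is not at the start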
4257 @property
4266 @property
4258 def title_safe(self):
4267 def title_safe(self):
4259 return self.title\
4268 return self.title\
4260 .replace('{', '{{')\
4269 .replace('{', '{{')\
4261 .replace('}', '}}')
4270 .replace('}', '}}')
4262
4271
4263 @hybrid_property
4272 @hybrid_property
4264 def description_safe(self):
4273 def description_safe(self):
4265 from rhodecode.lib import helpers as h
4274 from rhodecode.lib import helpers as h
4266 return h.escape(self.description)
4275 return h.escape(self.description)
4267
4276
4268 @hybrid_property
4277 @hybrid_property
4269 def revisions(self):
4278 def revisions(self):
4270 return self._revisions.split(':') if self._revisions else []
4279 return self._revisions.split(':') if self._revisions else []
4271
4280
4272 @revisions.setter
4281 @revisions.setter
4273 def revisions(self, val):
4282 def revisions(self, val):
4274 self._revisions = ':'.join(val)
4283 self._revisions = ':'.join(val)
4275
4284
4276 @hybrid_property
4285 @hybrid_property
4277 def last_merge_status(self):
4286 def last_merge_status(self):
4278 return safe_int(self._last_merge_status)
4287 return safe_int(self._last_merge_status)
4279
4288
4280 @last_merge_status.setter
4289 @last_merge_status.setter
4281 def last_merge_status(self, val):
4290 def last_merge_status(self, val):
4282 self._last_merge_status = val
4291 self._last_merge_status = val
4283
4292
4284 @declared_attr
4293 @declared_attr
4285 def author(cls):
4294 def author(cls):
4286 return relationship(
4295 return relationship(
4287 'User', lazy='joined',
4296 'User', lazy='joined',
4288 # TODO: enabling back_populates here triggers a mapper problem; left disabled for now
4297 # TODO: enabling back_populates here triggers a mapper problem; left disabled for now
4289 #back_populates='user_pull_requests'
4298 #back_populates='user_pull_requests'
4290 )
4299 )
4291
4300
4292 @declared_attr
4301 @declared_attr
4293 def source_repo(cls):
4302 def source_repo(cls):
4294 return relationship(
4303 return relationship(
4295 'Repository',
4304 'Repository',
4296 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4305 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4297 #back_populates=''
4306 #back_populates=''
4298 )
4307 )
4299
4308
4300 @property
4309 @property
4301 def source_ref_parts(self):
4310 def source_ref_parts(self):
4302 return self.unicode_to_reference(self.source_ref)
4311 return self.unicode_to_reference(self.source_ref)
4303
4312
4304 @declared_attr
4313 @declared_attr
4305 def target_repo(cls):
4314 def target_repo(cls):
4306 return relationship(
4315 return relationship(
4307 'Repository',
4316 'Repository',
4308 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id'
4317 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id'
4309 )
4318 )
4310
4319
4311 @property
4320 @property
4312 def target_ref_parts(self):
4321 def target_ref_parts(self):
4313 return self.unicode_to_reference(self.target_ref)
4322 return self.unicode_to_reference(self.target_ref)
4314
4323
4315 @property
4324 @property
4316 def shadow_merge_ref(self):
4325 def shadow_merge_ref(self):
4317 return self.unicode_to_reference(self._shadow_merge_ref)
4326 return self.unicode_to_reference(self._shadow_merge_ref)
4318
4327
4319 @shadow_merge_ref.setter
4328 @shadow_merge_ref.setter
4320 def shadow_merge_ref(self, ref):
4329 def shadow_merge_ref(self, ref):
4321 self._shadow_merge_ref = self.reference_to_unicode(ref)
4330 self._shadow_merge_ref = self.reference_to_unicode(ref)
4322
4331
4323 @staticmethod
4332 @staticmethod
4324 def unicode_to_reference(raw):
4333 def unicode_to_reference(raw):
4325 return unicode_to_reference(raw)
4334 return unicode_to_reference(raw)
4326
4335
4327 @staticmethod
4336 @staticmethod
4328 def reference_to_unicode(ref):
4337 def reference_to_unicode(ref):
4329 return reference_to_unicode(ref)
4338 return reference_to_unicode(ref)
4330
4339
4331 def get_api_data(self, with_merge_state=True):
4340 def get_api_data(self, with_merge_state=True):
4332 from rhodecode.model.pull_request import PullRequestModel
4341 from rhodecode.model.pull_request import PullRequestModel
4333
4342
4334 pull_request = self
4343 pull_request = self
4335 if with_merge_state:
4344 if with_merge_state:
4336 merge_response, merge_status, msg = \
4345 merge_response, merge_status, msg = \
4337 PullRequestModel().merge_status(pull_request)
4346 PullRequestModel().merge_status(pull_request)
4338 merge_state = {
4347 merge_state = {
4339 'status': merge_status,
4348 'status': merge_status,
4340 'message': safe_str(msg),
4349 'message': safe_str(msg),
4341 }
4350 }
4342 else:
4351 else:
4343 merge_state = {'status': 'not_available',
4352 merge_state = {'status': 'not_available',
4344 'message': 'not_available'}
4353 'message': 'not_available'}
4345
4354
4346 merge_data = {
4355 merge_data = {
4347 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4356 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4348 'reference': (
4357 'reference': (
4349 pull_request.shadow_merge_ref.asdict()
4358 pull_request.shadow_merge_ref.asdict()
4350 if pull_request.shadow_merge_ref else None),
4359 if pull_request.shadow_merge_ref else None),
4351 }
4360 }
4352
4361
4353 data = {
4362 data = {
4354 'pull_request_id': pull_request.pull_request_id,
4363 'pull_request_id': pull_request.pull_request_id,
4355 'url': PullRequestModel().get_url(pull_request),
4364 'url': PullRequestModel().get_url(pull_request),
4356 'title': pull_request.title,
4365 'title': pull_request.title,
4357 'description': pull_request.description,
4366 'description': pull_request.description,
4358 'status': pull_request.status,
4367 'status': pull_request.status,
4359 'state': pull_request.pull_request_state,
4368 'state': pull_request.pull_request_state,
4360 'created_on': pull_request.created_on,
4369 'created_on': pull_request.created_on,
4361 'updated_on': pull_request.updated_on,
4370 'updated_on': pull_request.updated_on,
4362 'commit_ids': pull_request.revisions,
4371 'commit_ids': pull_request.revisions,
4363 'review_status': pull_request.calculated_review_status(),
4372 'review_status': pull_request.calculated_review_status(),
4364 'mergeable': merge_state,
4373 'mergeable': merge_state,
4365 'source': {
4374 'source': {
4366 'clone_url': pull_request.source_repo.clone_url(),
4375 'clone_url': pull_request.source_repo.clone_url(),
4367 'repository': pull_request.source_repo.repo_name,
4376 'repository': pull_request.source_repo.repo_name,
4368 'reference': {
4377 'reference': {
4369 'name': pull_request.source_ref_parts.name,
4378 'name': pull_request.source_ref_parts.name,
4370 'type': pull_request.source_ref_parts.type,
4379 'type': pull_request.source_ref_parts.type,
4371 'commit_id': pull_request.source_ref_parts.commit_id,
4380 'commit_id': pull_request.source_ref_parts.commit_id,
4372 },
4381 },
4373 },
4382 },
4374 'target': {
4383 'target': {
4375 'clone_url': pull_request.target_repo.clone_url(),
4384 'clone_url': pull_request.target_repo.clone_url(),
4376 'repository': pull_request.target_repo.repo_name,
4385 'repository': pull_request.target_repo.repo_name,
4377 'reference': {
4386 'reference': {
4378 'name': pull_request.target_ref_parts.name,
4387 'name': pull_request.target_ref_parts.name,
4379 'type': pull_request.target_ref_parts.type,
4388 'type': pull_request.target_ref_parts.type,
4380 'commit_id': pull_request.target_ref_parts.commit_id,
4389 'commit_id': pull_request.target_ref_parts.commit_id,
4381 },
4390 },
4382 },
4391 },
4383 'merge': merge_data,
4392 'merge': merge_data,
4384 'author': pull_request.author.get_api_data(include_secrets=False,
4393 'author': pull_request.author.get_api_data(include_secrets=False,
4385 details='basic'),
4394 details='basic'),
4386 'reviewers': [
4395 'reviewers': [
4387 {
4396 {
4388 'user': reviewer.get_api_data(include_secrets=False,
4397 'user': reviewer.get_api_data(include_secrets=False,
4389 details='basic'),
4398 details='basic'),
4390 'reasons': reasons,
4399 'reasons': reasons,
4391 'review_status': st[0][1].status if st else 'not_reviewed',
4400 'review_status': st[0][1].status if st else 'not_reviewed',
4392 }
4401 }
4393 for obj, reviewer, reasons, mandatory, st in
4402 for obj, reviewer, reasons, mandatory, st in
4394 pull_request.reviewers_statuses()
4403 pull_request.reviewers_statuses()
4395 ]
4404 ]
4396 }
4405 }
4397
4406
4398 return data
4407 return data
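# Illustrative sketch (not part of the model): get_api_data() returns a plain
# dict, so API views can serialize it directly. Assuming a PullRequest loaded
# from the session (variable names below are hypothetical):
#
#     pr = PullRequest.get(pull_request_id)
#     data = pr.get_api_data(with_merge_state=False)  # skip the merge check
#     data['pull_request_id'], data['source']['reference']['name']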
4399
4408
4400 def set_state(self, pull_request_state, final_state=None):
4409 def set_state(self, pull_request_state, final_state=None):
4401 """
4410 """
4402 Context manager that switches the pull request into ``pull_request_state``
4411 Context manager that switches the pull request into ``pull_request_state``
4403 for the duration of the block and restores the initial state on exit, e.g.::
4412 for the duration of the block and restores the initial state on exit, e.g.::
4404 with pull_request.set_state(PullRequest.STATE_UPDATING):
4413 with pull_request.set_state(PullRequest.STATE_UPDATING):
4405 pull_request.merge()
4414 pull_request.merge()
4406
4415
4407 :param pull_request_state: state to hold while inside the ``with`` block
4416 :param pull_request_state: state to hold while inside the ``with`` block
4408 :param final_state: optional state to restore on exit instead of the initial one
4417 :param final_state: optional state to restore on exit instead of the initial one
4409
4418
4410 """
4419 """
4411
4420
4412 return _SetState(self, pull_request_state, back_state=final_state)
4421 return _SetState(self, pull_request_state, back_state=final_state)
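# Usage sketch for set_state() (assumes a loaded PullRequest instance; the
# work done inside the ``with`` block is hypothetical):
#
#     with pull_request.set_state(PullRequest.STATE_UPDATING):
#         ...  # state-changing work; the previous state is restored on exit
#
#     # or force a specific state when leaving the block:
#     with pull_request.set_state(PullRequest.STATE_UPDATING,
#                                 final_state=PullRequest.STATE_CREATED):
#         ...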
4413
4422
4414
4423
4415 class PullRequest(Base, _PullRequestBase):
4424 class PullRequest(Base, _PullRequestBase):
4416 __tablename__ = 'pull_requests'
4425 __tablename__ = 'pull_requests'
4417 __table_args__ = (
4426 __table_args__ = (
4418 base_table_args,
4427 base_table_args,
4419 )
4428 )
4420 LATEST_VER = 'latest'
4429 LATEST_VER = 'latest'
4421
4430
4422 pull_request_id = Column(
4431 pull_request_id = Column(
4423 'pull_request_id', Integer(), nullable=False, primary_key=True)
4432 'pull_request_id', Integer(), nullable=False, primary_key=True)
4424
4433
4425 def __repr__(self):
4434 def __repr__(self):
4426 if self.pull_request_id:
4435 if self.pull_request_id:
4427 return f'<DB:PullRequest #{self.pull_request_id}>'
4436 return f'<DB:PullRequest #{self.pull_request_id}>'
4428 else:
4437 else:
4429 return f'<DB:PullRequest at {id(self)!r}>'
4438 return f'<DB:PullRequest at {id(self)!r}>'
4430
4439
4431 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4440 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4432 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4441 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4433 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4442 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4434 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4443 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4435
4444
4436 @classmethod
4445 @classmethod
4437 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4446 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4438 internal_methods=None):
4447 internal_methods=None):
4439
4448
4440 class PullRequestDisplay(object):
4449 class PullRequestDisplay(object):
4441 """
4450 """
4442 Special object wrapper for showing PullRequest data via Versions.
4451 Special object wrapper for showing PullRequest data via Versions.
4443 It mimics the PR object as closely as possible. This is a read-only
4452 It mimics the PR object as closely as possible. This is a read-only
4444 object used just for display.
4453 object used just for display.
4445 """
4454 """
4446
4455
4447 def __init__(self, attrs, internal=None):
4456 def __init__(self, attrs, internal=None):
4448 self.attrs = attrs
4457 self.attrs = attrs
4449 # internal attributes take priority over the ones given via attrs
4458 # internal attributes take priority over the ones given via attrs
4450 self.internal = internal or ['versions']
4459 self.internal = internal or ['versions']
4451
4460
4452 def __getattr__(self, item):
4461 def __getattr__(self, item):
4453 if item in self.internal:
4462 if item in self.internal:
4454 return getattr(self, item)
4463 return getattr(self, item)
4455 try:
4464 try:
4456 return self.attrs[item]
4465 return self.attrs[item]
4457 except KeyError:
4466 except KeyError:
4458 raise AttributeError(
4467 raise AttributeError(
4459 f'{self} object has no attribute {item}')
4468 f'{self} object has no attribute {item}')
4460
4469
4461 def __repr__(self):
4470 def __repr__(self):
4462 pr_id = self.attrs.get('pull_request_id')
4471 pr_id = self.attrs.get('pull_request_id')
4463 return f'<DB:PullRequestDisplay #{pr_id}>'
4472 return f'<DB:PullRequestDisplay #{pr_id}>'
4464
4473
4465 def versions(self):
4474 def versions(self):
4466 return pull_request_obj.versions.order_by(
4475 return pull_request_obj.versions.order_by(
4467 PullRequestVersion.pull_request_version_id).all()
4476 PullRequestVersion.pull_request_version_id).all()
4468
4477
4469 def is_closed(self):
4478 def is_closed(self):
4470 return pull_request_obj.is_closed()
4479 return pull_request_obj.is_closed()
4471
4480
4472 def is_state_changing(self):
4481 def is_state_changing(self):
4473 return pull_request_obj.is_state_changing()
4482 return pull_request_obj.is_state_changing()
4474
4483
4475 @property
4484 @property
4476 def pull_request_version_id(self):
4485 def pull_request_version_id(self):
4477 return getattr(pull_request_obj, 'pull_request_version_id', None)
4486 return getattr(pull_request_obj, 'pull_request_version_id', None)
4478
4487
4479 @property
4488 @property
4480 def pull_request_last_version(self):
4489 def pull_request_last_version(self):
4481 return pull_request_obj.pull_request_last_version
4490 return pull_request_obj.pull_request_last_version
4482
4491
4483 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4492 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4484
4493
4485 attrs.author = StrictAttributeDict(
4494 attrs.author = StrictAttributeDict(
4486 pull_request_obj.author.get_api_data())
4495 pull_request_obj.author.get_api_data())
4487 if pull_request_obj.target_repo:
4496 if pull_request_obj.target_repo:
4488 attrs.target_repo = StrictAttributeDict(
4497 attrs.target_repo = StrictAttributeDict(
4489 pull_request_obj.target_repo.get_api_data())
4498 pull_request_obj.target_repo.get_api_data())
4490 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4499 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4491
4500
4492 if pull_request_obj.source_repo:
4501 if pull_request_obj.source_repo:
4493 attrs.source_repo = StrictAttributeDict(
4502 attrs.source_repo = StrictAttributeDict(
4494 pull_request_obj.source_repo.get_api_data())
4503 pull_request_obj.source_repo.get_api_data())
4495 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4504 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4496
4505
4497 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4506 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4498 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4507 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4499 attrs.revisions = pull_request_obj.revisions
4508 attrs.revisions = pull_request_obj.revisions
4500 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4509 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4501 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4510 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4502 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4511 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4503 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4512 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4504
4513
4505 return PullRequestDisplay(attrs, internal=internal_methods)
4514 return PullRequestDisplay(attrs, internal=internal_methods)
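# Sketch of how the display wrapper is meant to be consumed (hedged; the
# version objects below are assumptions): callers read plain attributes from
# the captured ``attrs`` dict, while the listed internal methods still go to
# the original object.
#
#     at_version = PullRequest.get_pr_display_object(pr_version, pr)
#     at_version.title       # served from the captured attrs dict
#     at_version.versions()  # 'versions' is internal -> real DB query on pr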
4506
4515
4507 def is_closed(self):
4516 def is_closed(self):
4508 return self.status == self.STATUS_CLOSED
4517 return self.status == self.STATUS_CLOSED
4509
4518
4510 def is_state_changing(self):
4519 def is_state_changing(self):
4511 return self.pull_request_state != PullRequest.STATE_CREATED
4520 return self.pull_request_state != PullRequest.STATE_CREATED
4512
4521
4513 def __json__(self):
4522 def __json__(self):
4514 return {
4523 return {
4515 'revisions': self.revisions,
4524 'revisions': self.revisions,
4516 'versions': self.versions_count
4525 'versions': self.versions_count
4517 }
4526 }
4518
4527
4519 def calculated_review_status(self):
4528 def calculated_review_status(self):
4520 from rhodecode.model.changeset_status import ChangesetStatusModel
4529 from rhodecode.model.changeset_status import ChangesetStatusModel
4521 return ChangesetStatusModel().calculated_review_status(self)
4530 return ChangesetStatusModel().calculated_review_status(self)
4522
4531
4523 def reviewers_statuses(self, user=None):
4532 def reviewers_statuses(self, user=None):
4524 from rhodecode.model.changeset_status import ChangesetStatusModel
4533 from rhodecode.model.changeset_status import ChangesetStatusModel
4525 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4534 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4526
4535
4527 def get_pull_request_reviewers(self, role=None):
4536 def get_pull_request_reviewers(self, role=None):
4528 qry = PullRequestReviewers.query()\
4537 qry = PullRequestReviewers.query()\
4529 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4538 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4530 if role:
4539 if role:
4531 qry = qry.filter(PullRequestReviewers.role == role)
4540 qry = qry.filter(PullRequestReviewers.role == role)
4532
4541
4533 return qry.all()
4542 return qry.all()
4534
4543
4535 @property
4544 @property
4536 def reviewers_count(self):
4545 def reviewers_count(self):
4537 qry = PullRequestReviewers.query()\
4546 qry = PullRequestReviewers.query()\
4538 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4547 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4539 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4548 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4540 return qry.count()
4549 return qry.count()
4541
4550
4542 @property
4551 @property
4543 def observers_count(self):
4552 def observers_count(self):
4544 qry = PullRequestReviewers.query()\
4553 qry = PullRequestReviewers.query()\
4545 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4554 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4546 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4555 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4547 return qry.count()
4556 return qry.count()
4548
4557
4549 def observers(self):
4558 def observers(self):
4550 qry = PullRequestReviewers.query()\
4559 qry = PullRequestReviewers.query()\
4551 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4560 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4552 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4561 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4553 .all()
4562 .all()
4554
4563
4555 for entry in qry:
4564 for entry in qry:
4556 yield entry, entry.user
4565 yield entry, entry.user
4557
4566
4558 @property
4567 @property
4559 def workspace_id(self):
4568 def workspace_id(self):
4560 from rhodecode.model.pull_request import PullRequestModel
4569 from rhodecode.model.pull_request import PullRequestModel
4561 return PullRequestModel()._workspace_id(self)
4570 return PullRequestModel()._workspace_id(self)
4562
4571
4563 def get_shadow_repo(self):
4572 def get_shadow_repo(self):
4564 workspace_id = self.workspace_id
4573 workspace_id = self.workspace_id
4565 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4574 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4566 if os.path.isdir(shadow_repository_path):
4575 if os.path.isdir(shadow_repository_path):
4567 vcs_obj = self.target_repo.scm_instance()
4576 vcs_obj = self.target_repo.scm_instance()
4568 return vcs_obj.get_shadow_instance(shadow_repository_path)
4577 return vcs_obj.get_shadow_instance(shadow_repository_path)
4569
4578
4570 @property
4579 @property
4571 def versions_count(self):
4580 def versions_count(self):
4572 """
4581 """
4573 return the number of versions this PR has, e.g. a PR that has been
4582 return the number of versions this PR has, e.g. a PR that has been
4574 updated once will have 2 versions
4583 updated once will have 2 versions
4575 """
4584 """
4576 return self.versions.count() + 1
4585 return self.versions.count() + 1
4577
4586
4578 @property
4587 @property
4579 def pull_request_last_version(self):
4588 def pull_request_last_version(self):
4580 return self.versions_count
4589 return self.versions_count
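# Worked example of the version math (a sketch, not executed here): a PR that
# was created and then updated twice has two stored PullRequestVersion rows, so
#
#     pr.versions.count()           # -> 2 (stored versions)
#     pr.versions_count             # -> 3 (stored versions + the live one)
#     pr.pull_request_last_version  # -> 3, the same number exposed as "latest"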
4581
4590
4582
4591
4583 class PullRequestVersion(Base, _PullRequestBase):
4592 class PullRequestVersion(Base, _PullRequestBase):
4584 __tablename__ = 'pull_request_versions'
4593 __tablename__ = 'pull_request_versions'
4585 __table_args__ = (
4594 __table_args__ = (
4586 base_table_args,
4595 base_table_args,
4587 )
4596 )
4588
4597
4589 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4598 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4590 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4599 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4591 pull_request = relationship('PullRequest', back_populates='versions')
4600 pull_request = relationship('PullRequest', back_populates='versions')
4592
4601
4593 def __repr__(self):
4602 def __repr__(self):
4594 if self.pull_request_version_id:
4603 if self.pull_request_version_id:
4595 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4604 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4596 else:
4605 else:
4597 return f'<DB:PullRequestVersion at {id(self)!r}>'
4606 return f'<DB:PullRequestVersion at {id(self)!r}>'
4598
4607
4599 @property
4608 @property
4600 def reviewers(self):
4609 def reviewers(self):
4601 return self.pull_request.reviewers
4610 return self.pull_request.reviewers
4602
4611
4603 @property
4612 @property
4604 def versions(self):
4613 def versions(self):
4605 return self.pull_request.versions
4614 return self.pull_request.versions
4606
4615
4607 def is_closed(self):
4616 def is_closed(self):
4608 # calculate from original
4617 # calculate from original
4609 return self.pull_request.status == self.STATUS_CLOSED
4618 return self.pull_request.status == self.STATUS_CLOSED
4610
4619
4611 def is_state_changing(self):
4620 def is_state_changing(self):
4612 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4621 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4613
4622
4614 def calculated_review_status(self):
4623 def calculated_review_status(self):
4615 return self.pull_request.calculated_review_status()
4624 return self.pull_request.calculated_review_status()
4616
4625
4617 def reviewers_statuses(self):
4626 def reviewers_statuses(self):
4618 return self.pull_request.reviewers_statuses()
4627 return self.pull_request.reviewers_statuses()
4619
4628
4620 def observers(self):
4629 def observers(self):
4621 return self.pull_request.observers()
4630 return self.pull_request.observers()
4622
4631
4623
4632
4624 class PullRequestReviewers(Base, BaseModel):
4633 class PullRequestReviewers(Base, BaseModel):
4625 __tablename__ = 'pull_request_reviewers'
4634 __tablename__ = 'pull_request_reviewers'
4626 __table_args__ = (
4635 __table_args__ = (
4627 base_table_args,
4636 base_table_args,
4628 )
4637 )
4629 ROLE_REVIEWER = 'reviewer'
4638 ROLE_REVIEWER = 'reviewer'
4630 ROLE_OBSERVER = 'observer'
4639 ROLE_OBSERVER = 'observer'
4631 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4640 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4632
4641
4633 @hybrid_property
4642 @hybrid_property
4634 def reasons(self):
4643 def reasons(self):
4635 if not self._reasons:
4644 if not self._reasons:
4636 return []
4645 return []
4637 return self._reasons
4646 return self._reasons
4638
4647
4639 @reasons.setter
4648 @reasons.setter
4640 def reasons(self, val):
4649 def reasons(self, val):
4641 val = val or []
4650 val = val or []
4642 if any(not isinstance(x, str) for x in val):
4651 if any(not isinstance(x, str) for x in val):
4643 raise Exception('invalid reasons type, must be list of strings')
4652 raise Exception('invalid reasons type, must be list of strings')
4644 self._reasons = val
4653 self._reasons = val
4645
4654
4646 pull_requests_reviewers_id = Column(
4655 pull_requests_reviewers_id = Column(
4647 'pull_requests_reviewers_id', Integer(), nullable=False,
4656 'pull_requests_reviewers_id', Integer(), nullable=False,
4648 primary_key=True)
4657 primary_key=True)
4649 pull_request_id = Column(
4658 pull_request_id = Column(
4650 "pull_request_id", Integer(),
4659 "pull_request_id", Integer(),
4651 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4660 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4652 user_id = Column(
4661 user_id = Column(
4653 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4662 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4654 _reasons = Column(
4663 _reasons = Column(
4655 'reason', MutationList.as_mutable(
4664 'reason', MutationList.as_mutable(
4656 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4665 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4657
4666
4658 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4667 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4659 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4668 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4660
4669
4661 user = relationship('User')
4670 user = relationship('User')
4662 pull_request = relationship('PullRequest', back_populates='reviewers')
4671 pull_request = relationship('PullRequest', back_populates='reviewers')
4663
4672
4664 rule_data = Column(
4673 rule_data = Column(
4665 'rule_data_json',
4674 'rule_data_json',
4666 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4675 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4667
4676
4668 def rule_user_group_data(self):
4677 def rule_user_group_data(self):
4669 """
4678 """
4670 Returns the voting user group rule data for this reviewer
4679 Returns the voting user group rule data for this reviewer
4671 """
4680 """
4672
4681
4673 if self.rule_data and 'vote_rule' in self.rule_data:
4682 if self.rule_data and 'vote_rule' in self.rule_data:
4674 user_group_data = {}
4683 user_group_data = {}
4675 if 'rule_user_group_entry_id' in self.rule_data:
4684 if 'rule_user_group_entry_id' in self.rule_data:
4676 # means a group with voting rules !
4685 # means a group with voting rules !
4677 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4686 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4678 user_group_data['name'] = self.rule_data['rule_name']
4687 user_group_data['name'] = self.rule_data['rule_name']
4679 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4688 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4680
4689
4681 return user_group_data
4690 return user_group_data
4682
4691
4683 @classmethod
4692 @classmethod
4684 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4693 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4685 qry = PullRequestReviewers.query()\
4694 qry = PullRequestReviewers.query()\
4686 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4695 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4687 if role:
4696 if role:
4688 qry = qry.filter(PullRequestReviewers.role == role)
4697 qry = qry.filter(PullRequestReviewers.role == role)
4689
4698
4690 return qry.all()
4699 return qry.all()
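# Usage sketch (hedged): fetch reviewer rows for a PR id, optionally narrowed
# by role.
#
#     reviewers = PullRequestReviewers.get_pull_request_reviewers(
#         pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
#     observers = PullRequestReviewers.get_pull_request_reviewers(
#         pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)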
4691
4700
4692 def __repr__(self):
4701 def __repr__(self):
4693 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4702 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4694
4703
4695
4704
4696 class Notification(Base, BaseModel):
4705 class Notification(Base, BaseModel):
4697 __tablename__ = 'notifications'
4706 __tablename__ = 'notifications'
4698 __table_args__ = (
4707 __table_args__ = (
4699 Index('notification_type_idx', 'type'),
4708 Index('notification_type_idx', 'type'),
4700 base_table_args,
4709 base_table_args,
4701 )
4710 )
4702
4711
4703 TYPE_CHANGESET_COMMENT = 'cs_comment'
4712 TYPE_CHANGESET_COMMENT = 'cs_comment'
4704 TYPE_MESSAGE = 'message'
4713 TYPE_MESSAGE = 'message'
4705 TYPE_MENTION = 'mention'
4714 TYPE_MENTION = 'mention'
4706 TYPE_REGISTRATION = 'registration'
4715 TYPE_REGISTRATION = 'registration'
4707 TYPE_PULL_REQUEST = 'pull_request'
4716 TYPE_PULL_REQUEST = 'pull_request'
4708 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4717 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4709 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4718 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4710
4719
4711 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4720 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4712 subject = Column('subject', Unicode(512), nullable=True)
4721 subject = Column('subject', Unicode(512), nullable=True)
4713 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4722 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4714 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4723 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4715 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4724 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4716 type_ = Column('type', Unicode(255))
4725 type_ = Column('type', Unicode(255))
4717
4726
4718 created_by_user = relationship('User', back_populates='user_created_notifications')
4727 created_by_user = relationship('User', back_populates='user_created_notifications')
4719 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4728 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4720
4729
4721 @property
4730 @property
4722 def recipients(self):
4731 def recipients(self):
4723 return [x.user for x in UserNotification.query()\
4732 return [x.user for x in UserNotification.query()\
4724 .filter(UserNotification.notification == self)\
4733 .filter(UserNotification.notification == self)\
4725 .order_by(UserNotification.user_id.asc()).all()]
4734 .order_by(UserNotification.user_id.asc()).all()]
4726
4735
4727 @classmethod
4736 @classmethod
4728 def create(cls, created_by, subject, body, recipients, type_=None):
4737 def create(cls, created_by, subject, body, recipients, type_=None):
4729 if type_ is None:
4738 if type_ is None:
4730 type_ = Notification.TYPE_MESSAGE
4739 type_ = Notification.TYPE_MESSAGE
4731
4740
4732 notification = cls()
4741 notification = cls()
4733 notification.created_by_user = created_by
4742 notification.created_by_user = created_by
4734 notification.subject = subject
4743 notification.subject = subject
4735 notification.body = body
4744 notification.body = body
4736 notification.type_ = type_
4745 notification.type_ = type_
4737 notification.created_on = datetime.datetime.now()
4746 notification.created_on = datetime.datetime.now()
4738
4747
4739 # For each recipient, link the created notification to their account
4748 # For each recipient, link the created notification to their account
4740 for u in recipients:
4749 for u in recipients:
4741 assoc = UserNotification()
4750 assoc = UserNotification()
4742 assoc.user_id = u.user_id
4751 assoc.user_id = u.user_id
4743 assoc.notification = notification
4752 assoc.notification = notification
4744
4753
4745 # if created_by is among the recipients, mark their notification
4754 # if created_by is among the recipients, mark their notification
4746 # as read
4755 # as read
4747 if u.user_id == created_by.user_id:
4756 if u.user_id == created_by.user_id:
4748 assoc.read = True
4757 assoc.read = True
4749 Session().add(assoc)
4758 Session().add(assoc)
4750
4759
4751 Session().add(notification)
4760 Session().add(notification)
4752
4761
4753 return notification
4762 return notification
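# Minimal usage sketch (assumes loaded User instances; committing the session
# is left to the caller, mirroring the method above which only add()s):
#
#     notification = Notification.create(
#         created_by=admin_user, subject='Heads up',
#         body='Please review', recipients=[user_a, user_b],
#         type_=Notification.TYPE_MESSAGE)
#     Session().commit()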
4754
4763
4755
4764
4756 class UserNotification(Base, BaseModel):
4765 class UserNotification(Base, BaseModel):
4757 __tablename__ = 'user_to_notification'
4766 __tablename__ = 'user_to_notification'
4758 __table_args__ = (
4767 __table_args__ = (
4759 UniqueConstraint('user_id', 'notification_id'),
4768 UniqueConstraint('user_id', 'notification_id'),
4760 base_table_args
4769 base_table_args
4761 )
4770 )
4762
4771
4763 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4772 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4764 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4773 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4765 read = Column('read', Boolean, default=False)
4774 read = Column('read', Boolean, default=False)
4766 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4775 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4767
4776
4768 user = relationship('User', lazy="joined", back_populates='notifications')
4777 user = relationship('User', lazy="joined", back_populates='notifications')
4769 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4778 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4770
4779
4771 def mark_as_read(self):
4780 def mark_as_read(self):
4772 self.read = True
4781 self.read = True
4773 Session().add(self)
4782 Session().add(self)
4774
4783
4775
4784
4776 class UserNotice(Base, BaseModel):
4785 class UserNotice(Base, BaseModel):
4777 __tablename__ = 'user_notices'
4786 __tablename__ = 'user_notices'
4778 __table_args__ = (
4787 __table_args__ = (
4779 base_table_args
4788 base_table_args
4780 )
4789 )
4781
4790
4782 NOTIFICATION_TYPE_MESSAGE = 'message'
4791 NOTIFICATION_TYPE_MESSAGE = 'message'
4783 NOTIFICATION_TYPE_NOTICE = 'notice'
4792 NOTIFICATION_TYPE_NOTICE = 'notice'
4784
4793
4785 NOTIFICATION_LEVEL_INFO = 'info'
4794 NOTIFICATION_LEVEL_INFO = 'info'
4786 NOTIFICATION_LEVEL_WARNING = 'warning'
4795 NOTIFICATION_LEVEL_WARNING = 'warning'
4787 NOTIFICATION_LEVEL_ERROR = 'error'
4796 NOTIFICATION_LEVEL_ERROR = 'error'
4788
4797
4789 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4798 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4790
4799
4791 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4800 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4792 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4801 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4793
4802
4794 notice_read = Column('notice_read', Boolean, default=False)
4803 notice_read = Column('notice_read', Boolean, default=False)
4795
4804
4796 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4805 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4797 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4806 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4798
4807
4799 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4808 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4800 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4809 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4801
4810
4802 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4811 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4803 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4812 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4804
4813
4805 @classmethod
4814 @classmethod
4806 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4815 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4807
4816
4808 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4817 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4809 cls.NOTIFICATION_LEVEL_WARNING,
4818 cls.NOTIFICATION_LEVEL_WARNING,
4810 cls.NOTIFICATION_LEVEL_INFO]:
4819 cls.NOTIFICATION_LEVEL_INFO]:
4811 return
4820 return
4812
4821
4813 from rhodecode.model.user import UserModel
4822 from rhodecode.model.user import UserModel
4814 user = UserModel().get_user(user)
4823 user = UserModel().get_user(user)
4815
4824
4816 new_notice = UserNotice()
4825 new_notice = UserNotice()
4817 if not allow_duplicate:
4826 if not allow_duplicate:
4818 existing_msg = UserNotice().query() \
4827 existing_msg = UserNotice().query() \
4819 .filter(UserNotice.user == user) \
4828 .filter(UserNotice.user == user) \
4820 .filter(UserNotice.notice_body == body) \
4829 .filter(UserNotice.notice_body == body) \
4821 .filter(UserNotice.notice_read == false()) \
4830 .filter(UserNotice.notice_read == false()) \
4822 .scalar()
4831 .scalar()
4823 if existing_msg:
4832 if existing_msg:
4824 log.warning('Ignoring duplicate notice for user %s', user)
4833 log.warning('Ignoring duplicate notice for user %s', user)
4825 return
4834 return
4826
4835
4827 new_notice.user = user
4836 new_notice.user = user
4828 new_notice.notice_subject = subject
4837 new_notice.notice_subject = subject
4829 new_notice.notice_body = body
4838 new_notice.notice_body = body
4830 new_notice.notification_level = notice_level
4839 new_notice.notification_level = notice_level
4831 Session().add(new_notice)
4840 Session().add(new_notice)
4832 Session().commit()
4841 Session().commit()
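# Usage sketch (hedged): create_for_user() skips duplicate unread notices with
# the same body unless allow_duplicate=True, and commits internally.
#
#     UserNotice.create_for_user(
#         user, subject='Storage almost full',
#         body='Repository storage is at 90% capacity',
#         notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)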
4833
4842
4834
4843
4835 class Gist(Base, BaseModel):
4844 class Gist(Base, BaseModel):
4836 __tablename__ = 'gists'
4845 __tablename__ = 'gists'
4837 __table_args__ = (
4846 __table_args__ = (
4838 Index('g_gist_access_id_idx', 'gist_access_id'),
4847 Index('g_gist_access_id_idx', 'gist_access_id'),
4839 Index('g_created_on_idx', 'created_on'),
4848 Index('g_created_on_idx', 'created_on'),
4840 base_table_args
4849 base_table_args
4841 )
4850 )
4842
4851
4843 GIST_PUBLIC = 'public'
4852 GIST_PUBLIC = 'public'
4844 GIST_PRIVATE = 'private'
4853 GIST_PRIVATE = 'private'
4845 DEFAULT_FILENAME = 'gistfile1.txt'
4854 DEFAULT_FILENAME = 'gistfile1.txt'
4846
4855
4847 ACL_LEVEL_PUBLIC = 'acl_public'
4856 ACL_LEVEL_PUBLIC = 'acl_public'
4848 ACL_LEVEL_PRIVATE = 'acl_private'
4857 ACL_LEVEL_PRIVATE = 'acl_private'
4849
4858
4850 gist_id = Column('gist_id', Integer(), primary_key=True)
4859 gist_id = Column('gist_id', Integer(), primary_key=True)
4851 gist_access_id = Column('gist_access_id', Unicode(250))
4860 gist_access_id = Column('gist_access_id', Unicode(250))
4852 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4861 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4853 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4862 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4854 gist_expires = Column('gist_expires', Float(53), nullable=False)
4863 gist_expires = Column('gist_expires', Float(53), nullable=False)
4855 gist_type = Column('gist_type', Unicode(128), nullable=False)
4864 gist_type = Column('gist_type', Unicode(128), nullable=False)
4856 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4865 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4857 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4866 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4858 acl_level = Column('acl_level', Unicode(128), nullable=True)
4867 acl_level = Column('acl_level', Unicode(128), nullable=True)
4859
4868
4860 owner = relationship('User', back_populates='user_gists')
4869 owner = relationship('User', back_populates='user_gists')
4861
4870
4862 def __repr__(self):
4871 def __repr__(self):
4863 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4872 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4864
4873
4865 @hybrid_property
4874 @hybrid_property
4866 def description_safe(self):
4875 def description_safe(self):
4867 from rhodecode.lib import helpers as h
4876 from rhodecode.lib import helpers as h
4868 return h.escape(self.gist_description)
4877 return h.escape(self.gist_description)
4869
4878
4870 @classmethod
4879 @classmethod
4871 def get_or_404(cls, id_):
4880 def get_or_404(cls, id_):
4872 from pyramid.httpexceptions import HTTPNotFound
4881 from pyramid.httpexceptions import HTTPNotFound
4873
4882
4874 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4883 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4875 if not res:
4884 if not res:
4876 log.debug('WARN: No DB entry with id %s', id_)
4885 log.debug('WARN: No DB entry with id %s', id_)
4877 raise HTTPNotFound()
4886 raise HTTPNotFound()
4878 return res
4887 return res
4879
4888
4880 @classmethod
4889 @classmethod
4881 def get_by_access_id(cls, gist_access_id):
4890 def get_by_access_id(cls, gist_access_id):
4882 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4891 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
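# Lookup sketch (hedged; the access id is an example value): get_or_404()
# raises HTTPNotFound for view code, while get_by_access_id() simply returns
# None when nothing matches.
#
#     gist = Gist.get_by_access_id('aBcD1234')
#     if gist is None:
#         ...  # handle a missing gist without a pyramid exception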
4883
4892
4884 def gist_url(self):
4893 def gist_url(self):
4885 from rhodecode.model.gist import GistModel
4894 from rhodecode.model.gist import GistModel
4886 return GistModel().get_url(self)
4895 return GistModel().get_url(self)
4887
4896
4888 @classmethod
4897 @classmethod
4889 def base_path(cls):
4898 def base_path(cls):
4890 """
4899 """
4891 Returns the base path where all gists are stored
4900 Returns the base path where all gists are stored
4892
4901
4893 :param cls:
4902 :param cls:
4894 """
4903 """
4895 from rhodecode.model.gist import GIST_STORE_LOC
4904 from rhodecode.model.gist import GIST_STORE_LOC
4896 q = Session().query(RhodeCodeUi)\
4905 q = Session().query(RhodeCodeUi)\
4897 .filter(RhodeCodeUi.ui_key == URL_SEP)
4906 .filter(RhodeCodeUi.ui_key == URL_SEP)
4898 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4907 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4899 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4908 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4900
4909
4901 def get_api_data(self):
4910 def get_api_data(self):
4902 """
4911 """
4903 Common function for generating gist related data for API
4912 Common function for generating gist related data for API
4904 """
4913 """
4905 gist = self
4914 gist = self
4906 data = {
4915 data = {
4907 'gist_id': gist.gist_id,
4916 'gist_id': gist.gist_id,
4908 'type': gist.gist_type,
4917 'type': gist.gist_type,
4909 'access_id': gist.gist_access_id,
4918 'access_id': gist.gist_access_id,
4910 'description': gist.gist_description,
4919 'description': gist.gist_description,
4911 'url': gist.gist_url(),
4920 'url': gist.gist_url(),
4912 'expires': gist.gist_expires,
4921 'expires': gist.gist_expires,
4913 'created_on': gist.created_on,
4922 'created_on': gist.created_on,
4914 'modified_at': gist.modified_at,
4923 'modified_at': gist.modified_at,
4915 'content': None,
4924 'content': None,
4916 'acl_level': gist.acl_level,
4925 'acl_level': gist.acl_level,
4917 }
4926 }
4918 return data
4927 return data
4919
4928
4920 def __json__(self):
4929 def __json__(self):
4921 data = dict()
4930 data = dict()
4923 data.update(self.get_api_data())
4932 data.update(self.get_api_data())
4924 return data
4933 return data
4925 # SCM functions
4934 # SCM functions
4926
4935
4927 def scm_instance(self, **kwargs):
4936 def scm_instance(self, **kwargs):
4928 """
4937 """
4929 Get an instance of VCS Repository
4938 Get an instance of VCS Repository
4930
4939
4931 :param kwargs:
4940 :param kwargs:
4932 """
4941 """
4933 from rhodecode.model.gist import GistModel
4942 from rhodecode.model.gist import GistModel
4934 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4943 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4935 return get_vcs_instance(
4944 return get_vcs_instance(
4936 repo_path=safe_str(full_repo_path), create=False,
4945 repo_path=safe_str(full_repo_path), create=False,
4937 _vcs_alias=GistModel.vcs_backend)
4946 _vcs_alias=GistModel.vcs_backend)
4938
4947
4939
4948
4940 class ExternalIdentity(Base, BaseModel):
4949 class ExternalIdentity(Base, BaseModel):
4941 __tablename__ = 'external_identities'
4950 __tablename__ = 'external_identities'
4942 __table_args__ = (
4951 __table_args__ = (
4943 Index('local_user_id_idx', 'local_user_id'),
4952 Index('local_user_id_idx', 'local_user_id'),
4944 Index('external_id_idx', 'external_id'),
4953 Index('external_id_idx', 'external_id'),
4945 base_table_args
4954 base_table_args
4946 )
4955 )
4947
4956
4948 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
4957 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
4949 external_username = Column('external_username', Unicode(1024), default='')
4958 external_username = Column('external_username', Unicode(1024), default='')
4950 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4959 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4951 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
4960 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
4952 access_token = Column('access_token', String(1024), default='')
4961 access_token = Column('access_token', String(1024), default='')
4953 alt_token = Column('alt_token', String(1024), default='')
4962 alt_token = Column('alt_token', String(1024), default='')
4954 token_secret = Column('token_secret', String(1024), default='')
4963 token_secret = Column('token_secret', String(1024), default='')
4955
4964
4956 @classmethod
4965 @classmethod
4957 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4966 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4958 """
4967 """
4959 Returns ExternalIdentity instance based on search params
4968 Returns ExternalIdentity instance based on search params
4960
4969
4961 :param external_id:
4970 :param external_id:
4962 :param provider_name:
4971 :param provider_name:
4963 :return: ExternalIdentity
4972 :return: ExternalIdentity
4964 """
4973 """
4965 query = cls.query()
4974 query = cls.query()
4966 query = query.filter(cls.external_id == external_id)
4975 query = query.filter(cls.external_id == external_id)
4967 query = query.filter(cls.provider_name == provider_name)
4976 query = query.filter(cls.provider_name == provider_name)
4968 if local_user_id:
4977 if local_user_id:
4969 query = query.filter(cls.local_user_id == local_user_id)
4978 query = query.filter(cls.local_user_id == local_user_id)
4970 return query.first()
4979 return query.first()
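# Usage sketch (hedged; the provider name is an example value): pass
# local_user_id to narrow the match to one local account.
#
#     identity = ExternalIdentity.by_external_id_and_provider(
#         external_id='12345', provider_name='github',
#         local_user_id=user.user_id)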
4971
4980
4972 @classmethod
4981 @classmethod
4973 def user_by_external_id_and_provider(cls, external_id, provider_name):
4982 def user_by_external_id_and_provider(cls, external_id, provider_name):
4974 """
4983 """
4975 Returns User instance based on search params
4984 Returns User instance based on search params
4976
4985
4977 :param external_id:
4986 :param external_id:
4978 :param provider_name:
4987 :param provider_name:
4979 :return: User
4988 :return: User
4980 """
4989 """
4981 query = User.query()
4990 query = User.query()
4982 query = query.filter(cls.external_id == external_id)
4991 query = query.filter(cls.external_id == external_id)
4983 query = query.filter(cls.provider_name == provider_name)
4992 query = query.filter(cls.provider_name == provider_name)
4984 query = query.filter(User.user_id == cls.local_user_id)
4993 query = query.filter(User.user_id == cls.local_user_id)
4985 return query.first()
4994 return query.first()
4986
4995
4987 @classmethod
4996 @classmethod
4988 def by_local_user_id(cls, local_user_id):
4997 def by_local_user_id(cls, local_user_id):
4989 """
4998 """
4990 Returns all external identities (and their tokens) for the given user
4999 Returns all external identities (and their tokens) for the given user
4991
5000
4992 :param local_user_id:
5001 :param local_user_id:
4993 :return: query of ExternalIdentity objects
5002 :return: query of ExternalIdentity objects
4994 """
5003 """
4995 query = cls.query()
5004 query = cls.query()
4996 query = query.filter(cls.local_user_id == local_user_id)
5005 query = query.filter(cls.local_user_id == local_user_id)
4997 return query
5006 return query
4998
5007
4999 @classmethod
5008 @classmethod
5000 def load_provider_plugin(cls, plugin_id):
5009 def load_provider_plugin(cls, plugin_id):
5001 from rhodecode.authentication.base import loadplugin
5010 from rhodecode.authentication.base import loadplugin
5002 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5011 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5003 auth_plugin = loadplugin(_plugin_id)
5012 auth_plugin = loadplugin(_plugin_id)
5004 return auth_plugin
5013 return auth_plugin
5005
5014
5006
5015
5007 class Integration(Base, BaseModel):
5016 class Integration(Base, BaseModel):
5008 __tablename__ = 'integrations'
5017 __tablename__ = 'integrations'
5009 __table_args__ = (
5018 __table_args__ = (
5010 base_table_args
5019 base_table_args
5011 )
5020 )
5012
5021
5013 integration_id = Column('integration_id', Integer(), primary_key=True)
5022 integration_id = Column('integration_id', Integer(), primary_key=True)
5014 integration_type = Column('integration_type', String(255))
5023 integration_type = Column('integration_type', String(255))
5015 enabled = Column('enabled', Boolean(), nullable=False)
5024 enabled = Column('enabled', Boolean(), nullable=False)
5016 name = Column('name', String(255), nullable=False)
5025 name = Column('name', String(255), nullable=False)
5017 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5026 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5018
5027
5019 settings = Column(
5028 settings = Column(
5020 'settings_json', MutationObj.as_mutable(
5029 'settings_json', MutationObj.as_mutable(
5021 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5030 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5022 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5031 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5023 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5032 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5024
5033
5025 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5034 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5026 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5035 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5027
5036
5028 @property
5037 @property
5029 def scope(self):
5038 def scope(self):
5030 if self.repo:
5039 if self.repo:
5031 return repr(self.repo)
5040 return repr(self.repo)
5032 if self.repo_group:
5041 if self.repo_group:
5033 if self.child_repos_only:
5042 if self.child_repos_only:
5034 return repr(self.repo_group) + ' (child repos only)'
5043 return repr(self.repo_group) + ' (child repos only)'
5035 else:
5044 else:
5036 return repr(self.repo_group) + ' (recursive)'
5045 return repr(self.repo_group) + ' (recursive)'
5037 if self.child_repos_only:
5046 if self.child_repos_only:
5038 return 'root_repos'
5047 return 'root_repos'
5039 return 'global'
5048 return 'global'
5040
5049
5041 def __repr__(self):
5050 def __repr__(self):
5042 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5051 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5043
5052
5044
5053
5045 class RepoReviewRuleUser(Base, BaseModel):
5054 class RepoReviewRuleUser(Base, BaseModel):
5046 __tablename__ = 'repo_review_rules_users'
5055 __tablename__ = 'repo_review_rules_users'
5047 __table_args__ = (
5056 __table_args__ = (
5048 base_table_args
5057 base_table_args
5049 )
5058 )
5050 ROLE_REVIEWER = 'reviewer'
5059 ROLE_REVIEWER = 'reviewer'
5051 ROLE_OBSERVER = 'observer'
5060 ROLE_OBSERVER = 'observer'
5052 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5061 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5053
5062
5054 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5063 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5055 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5064 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5056 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5065 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5057 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5066 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5058 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5067 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5059 user = relationship('User', back_populates='user_review_rules')
5068 user = relationship('User', back_populates='user_review_rules')
5060
5069
5061 def rule_data(self):
5070 def rule_data(self):
5062 return {
5071 return {
5063 'mandatory': self.mandatory,
5072 'mandatory': self.mandatory,
5064 'role': self.role,
5073 'role': self.role,
5065 }
5074 }
5066
5075
5067
5076
5068 class RepoReviewRuleUserGroup(Base, BaseModel):
5077 class RepoReviewRuleUserGroup(Base, BaseModel):
5069 __tablename__ = 'repo_review_rules_users_groups'
5078 __tablename__ = 'repo_review_rules_users_groups'
5070 __table_args__ = (
5079 __table_args__ = (
5071 base_table_args
5080 base_table_args
5072 )
5081 )
5073
5082
5074 VOTE_RULE_ALL = -1
5083 VOTE_RULE_ALL = -1
5075 ROLE_REVIEWER = 'reviewer'
5084 ROLE_REVIEWER = 'reviewer'
5076 ROLE_OBSERVER = 'observer'
5085 ROLE_OBSERVER = 'observer'
5077 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5086 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5078
5087
5079 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5088 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5080 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5089 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5081 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5090 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5082 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5091 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5083 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5092 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5084 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5093 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5085 users_group = relationship('UserGroup')
5094 users_group = relationship('UserGroup')
5086
5095
5087 def rule_data(self):
5096 def rule_data(self):
5088 return {
5097 return {
5089 'mandatory': self.mandatory,
5098 'mandatory': self.mandatory,
5090 'role': self.role,
5099 'role': self.role,
5091 'vote_rule': self.vote_rule
5100 'vote_rule': self.vote_rule
5092 }
5101 }
5093
5102
5094 @property
5103 @property
5095 def vote_rule_label(self):
5104 def vote_rule_label(self):
5096 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5105 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5097 return 'all must vote'
5106 return 'all must vote'
5098 else:
5107 else:
5099 return 'min. vote {}'.format(self.vote_rule)
5108 return 'min. vote {}'.format(self.vote_rule)
5100
5109
5101
5110
5102 class RepoReviewRule(Base, BaseModel):
5111 class RepoReviewRule(Base, BaseModel):
5103 __tablename__ = 'repo_review_rules'
5112 __tablename__ = 'repo_review_rules'
5104 __table_args__ = (
5113 __table_args__ = (
5105 base_table_args
5114 base_table_args
5106 )
5115 )
5107
5116
5108 repo_review_rule_id = Column(
5117 repo_review_rule_id = Column(
5109 'repo_review_rule_id', Integer(), primary_key=True)
5118 'repo_review_rule_id', Integer(), primary_key=True)
5110 repo_id = Column(
5119 repo_id = Column(
5111 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5120 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5112 repo = relationship('Repository', back_populates='review_rules')
5121 repo = relationship('Repository', back_populates='review_rules')
5113
5122
5114 review_rule_name = Column('review_rule_name', String(255))
5123 review_rule_name = Column('review_rule_name', String(255))
5115 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5124 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5116 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5125 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5117 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5126 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5118
5127
5119 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5128 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5120
5129
5121 # Legacy fields, just for backward compat
5130 # Legacy fields, just for backward compat
5122 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5131 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5123 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5132 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5124
5133
5125 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5134 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5126 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5135 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5127
5136
5128 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5137 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5129
5138
5130 rule_users = relationship('RepoReviewRuleUser')
5139 rule_users = relationship('RepoReviewRuleUser')
5131 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5140 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5132
5141
5133 def _validate_pattern(self, value):
5142 def _validate_pattern(self, value):
5134 re.compile('^' + glob2re(value) + '$')
5143 re.compile('^' + glob2re(value) + '$')
5135
5144
5136 @hybrid_property
5145 @hybrid_property
5137 def source_branch_pattern(self):
5146 def source_branch_pattern(self):
5138 return self._branch_pattern or '*'
5147 return self._branch_pattern or '*'
5139
5148
5140 @source_branch_pattern.setter
5149 @source_branch_pattern.setter
5141 def source_branch_pattern(self, value):
5150 def source_branch_pattern(self, value):
5142 self._validate_pattern(value)
5151 self._validate_pattern(value)
5143 self._branch_pattern = value or '*'
5152 self._branch_pattern = value or '*'
5144
5153
5145 @hybrid_property
5154 @hybrid_property
5146 def target_branch_pattern(self):
5155 def target_branch_pattern(self):
5147 return self._target_branch_pattern or '*'
5156 return self._target_branch_pattern or '*'
5148
5157
5149 @target_branch_pattern.setter
5158 @target_branch_pattern.setter
5150 def target_branch_pattern(self, value):
5159 def target_branch_pattern(self, value):
5151 self._validate_pattern(value)
5160 self._validate_pattern(value)
5152 self._target_branch_pattern = value or '*'
5161 self._target_branch_pattern = value or '*'
5153
5162
5154 @hybrid_property
5163 @hybrid_property
5155 def file_pattern(self):
5164 def file_pattern(self):
5156 return self._file_pattern or '*'
5165 return self._file_pattern or '*'
5157
5166
5158 @file_pattern.setter
5167 @file_pattern.setter
5159 def file_pattern(self, value):
5168 def file_pattern(self, value):
5160 self._validate_pattern(value)
5169 self._validate_pattern(value)
5161 self._file_pattern = value or '*'
5170 self._file_pattern = value or '*'
5162
5171
5163 @hybrid_property
5172 @hybrid_property
5164 def forbid_pr_author_to_review(self):
5173 def forbid_pr_author_to_review(self):
5165 return self.pr_author == 'forbid_pr_author'
5174 return self.pr_author == 'forbid_pr_author'
5166
5175
5167 @hybrid_property
5176 @hybrid_property
5168 def include_pr_author_to_review(self):
5177 def include_pr_author_to_review(self):
5169 return self.pr_author == 'include_pr_author'
5178 return self.pr_author == 'include_pr_author'
5170
5179
5171 @hybrid_property
5180 @hybrid_property
5172 def forbid_commit_author_to_review(self):
5181 def forbid_commit_author_to_review(self):
5173 return self.commit_author == 'forbid_commit_author'
5182 return self.commit_author == 'forbid_commit_author'
5174
5183
5175 @hybrid_property
5184 @hybrid_property
5176 def include_commit_author_to_review(self):
5185 def include_commit_author_to_review(self):
5177 return self.commit_author == 'include_commit_author'
5186 return self.commit_author == 'include_commit_author'
5178
5187
5179 def matches(self, source_branch, target_branch, files_changed):
5188 def matches(self, source_branch, target_branch, files_changed):
5180 """
5189 """
5181 Check if this review rule matches a branch/files in a pull request
5190 Check if this review rule matches a branch/files in a pull request
5182
5191
5183 :param source_branch: source branch name for the commit
5192 :param source_branch: source branch name for the commit
5184 :param target_branch: target branch name for the commit
5193 :param target_branch: target branch name for the commit
5185 :param files_changed: list of file paths changed in the pull request
5194 :param files_changed: list of file paths changed in the pull request
5186 """
5195 """
5187
5196
5188 source_branch = source_branch or ''
5197 source_branch = source_branch or ''
5189 target_branch = target_branch or ''
5198 target_branch = target_branch or ''
5190 files_changed = files_changed or []
5199 files_changed = files_changed or []
5191
5200
5192 branch_matches = True
5201 branch_matches = True
5193 if source_branch or target_branch:
5202 if source_branch or target_branch:
5194 if self.source_branch_pattern == '*':
5203 if self.source_branch_pattern == '*':
5195 source_branch_match = True
5204 source_branch_match = True
5196 else:
5205 else:
5197 if self.source_branch_pattern.startswith('re:'):
5206 if self.source_branch_pattern.startswith('re:'):
5198 source_pattern = self.source_branch_pattern[3:]
5207 source_pattern = self.source_branch_pattern[3:]
5199 else:
5208 else:
5200 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5209 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5201 source_branch_regex = re.compile(source_pattern)
5210 source_branch_regex = re.compile(source_pattern)
5202 source_branch_match = bool(source_branch_regex.search(source_branch))
5211 source_branch_match = bool(source_branch_regex.search(source_branch))
5203 if self.target_branch_pattern == '*':
5212 if self.target_branch_pattern == '*':
5204 target_branch_match = True
5213 target_branch_match = True
5205 else:
5214 else:
5206 if self.target_branch_pattern.startswith('re:'):
5215 if self.target_branch_pattern.startswith('re:'):
5207 target_pattern = self.target_branch_pattern[3:]
5216 target_pattern = self.target_branch_pattern[3:]
5208 else:
5217 else:
5209 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5218 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5210 target_branch_regex = re.compile(target_pattern)
5219 target_branch_regex = re.compile(target_pattern)
5211 target_branch_match = bool(target_branch_regex.search(target_branch))
5220 target_branch_match = bool(target_branch_regex.search(target_branch))
5212
5221
5213 branch_matches = source_branch_match and target_branch_match
5222 branch_matches = source_branch_match and target_branch_match
5214
5223
5215 files_matches = True
5224 files_matches = True
5216 if self.file_pattern != '*':
5225 if self.file_pattern != '*':
5217 files_matches = False
5226 files_matches = False
5218 if self.file_pattern.startswith('re:'):
5227 if self.file_pattern.startswith('re:'):
5219 file_pattern = self.file_pattern[3:]
5228 file_pattern = self.file_pattern[3:]
5220 else:
5229 else:
5221 file_pattern = glob2re(self.file_pattern)
5230 file_pattern = glob2re(self.file_pattern)
5222 file_regex = re.compile(file_pattern)
5231 file_regex = re.compile(file_pattern)
5223 for file_data in files_changed:
5232 for file_data in files_changed:
5224 filename = file_data.get('filename')
5233 filename = file_data.get('filename')
5225
5234
5226 if file_regex.search(filename):
5235 if file_regex.search(filename):
5227 files_matches = True
5236 files_matches = True
5228 break
5237 break
5229
5238
5230 return branch_matches and files_matches
5239 return branch_matches and files_matches
5231
5240
5232 @property
5241 @property
5233 def review_users(self):
5242 def review_users(self):
5234 """ Returns the users which this rule applies to """
5243 """ Returns the users which this rule applies to """
5235
5244
5236 users = collections.OrderedDict()
5245 users = collections.OrderedDict()
5237
5246
5238 for rule_user in self.rule_users:
5247 for rule_user in self.rule_users:
5239 if rule_user.user.active:
5248 if rule_user.user.active:
5240 if rule_user.user not in users:
5249 if rule_user.user not in users:
5241 users[rule_user.user.username] = {
5250 users[rule_user.user.username] = {
5242 'user': rule_user.user,
5251 'user': rule_user.user,
5243 'source': 'user',
5252 'source': 'user',
5244 'source_data': {},
5253 'source_data': {},
5245 'data': rule_user.rule_data()
5254 'data': rule_user.rule_data()
5246 }
5255 }
5247
5256
5248 for rule_user_group in self.rule_user_groups:
5257 for rule_user_group in self.rule_user_groups:
5249 source_data = {
5258 source_data = {
5250 'user_group_id': rule_user_group.users_group.users_group_id,
5259 'user_group_id': rule_user_group.users_group.users_group_id,
5251 'name': rule_user_group.users_group.users_group_name,
5260 'name': rule_user_group.users_group.users_group_name,
5252 'members': len(rule_user_group.users_group.members)
5261 'members': len(rule_user_group.users_group.members)
5253 }
5262 }
5254 for member in rule_user_group.users_group.members:
5263 for member in rule_user_group.users_group.members:
5255 if member.user.active:
5264 if member.user.active:
5256 key = member.user.username
5265 key = member.user.username
5257 if key in users:
5266 if key in users:
5258 # skip this member as we already have them
5267 # skip this member as we already have them
5259 # this prevents duplicates in multiple groups from
5268 # this prevents duplicates in multiple groups from
5260 # overriding the "first" matched user
5269 # overriding the "first" matched user
5261 continue
5270 continue
5262
5271
5263 users[key] = {
5272 users[key] = {
5264 'user': member.user,
5273 'user': member.user,
5265 'source': 'user_group',
5274 'source': 'user_group',
5266 'source_data': source_data,
5275 'source_data': source_data,
5267 'data': rule_user_group.rule_data()
5276 'data': rule_user_group.rule_data()
5268 }
5277 }
5269
5278
5270 return users
5279 return users
5271
5280
5272 def user_group_vote_rule(self, user_id):
5281 def user_group_vote_rule(self, user_id):
5273
5282
5274 rules = []
5283 rules = []
5275 if not self.rule_user_groups:
5284 if not self.rule_user_groups:
5276 return rules
5285 return rules
5277
5286
5278 for user_group in self.rule_user_groups:
5287 for user_group in self.rule_user_groups:
5279 user_group_members = [x.user_id for x in user_group.users_group.members]
5288 user_group_members = [x.user_id for x in user_group.users_group.members]
5280 if user_id in user_group_members:
5289 if user_id in user_group_members:
5281 rules.append(user_group)
5290 rules.append(user_group)
5282 return rules
5291 return rules
5283
5292
5284 def __repr__(self):
5293 def __repr__(self):
5285 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5294 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5286
5295
5287
5296
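A minimal usage sketch for the review-rule matching above (editor's illustration, not part of this file; assumes an initialized RhodeCode session and an existing rule record):

# Illustrative only: evaluate a review rule against a hypothetical pull request.
rule = RepoReviewRule.get(1)  # hypothetical rule id
applies = rule.matches(
    source_branch='feature/login',
    target_branch='develop',
    files_changed=[{'filename': 'rhodecode/model/db.py'}],
)
if applies:
    reviewers = rule.review_users  # OrderedDict keyed by username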
5288 class ScheduleEntry(Base, BaseModel):
5297 class ScheduleEntry(Base, BaseModel):
5289 __tablename__ = 'schedule_entries'
5298 __tablename__ = 'schedule_entries'
5290 __table_args__ = (
5299 __table_args__ = (
5291 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5300 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5292 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5301 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5293 base_table_args,
5302 base_table_args,
5294 )
5303 )
5295 SCHEDULE_TYPE_INTEGER = "integer"
5304 SCHEDULE_TYPE_INTEGER = "integer"
5296 SCHEDULE_TYPE_CRONTAB = "crontab"
5305 SCHEDULE_TYPE_CRONTAB = "crontab"
5297
5306
5298 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5307 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5299 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5308 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5300
5309
5301 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5310 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5302 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5311 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5303 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5312 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5304
5313
5305 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5314 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5306 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5315 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5307
5316
5308 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5317 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5309 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5318 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5310
5319
5311 # task
5320 # task
5312 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5321 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5313 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5322 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5314 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5323 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5315 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5324 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5316
5325
5317 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5326 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5318 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5327 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5319
5328
5320 @hybrid_property
5329 @hybrid_property
5321 def schedule_type(self):
5330 def schedule_type(self):
5322 return self._schedule_type
5331 return self._schedule_type
5323
5332
5324 @schedule_type.setter
5333 @schedule_type.setter
5325 def schedule_type(self, val):
5334 def schedule_type(self, val):
5326 if val not in self.schedule_types:
5335 if val not in self.schedule_types:
5327 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5336 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5328 self.schedule_types, val))
5337 self.schedule_types, val))
5329
5338
5330 self._schedule_type = val
5339 self._schedule_type = val
5331
5340
5332 @classmethod
5341 @classmethod
5333 def get_uid(cls, obj):
5342 def get_uid(cls, obj):
5334 args = obj.task_args
5343 args = obj.task_args
5335 kwargs = obj.task_kwargs
5344 kwargs = obj.task_kwargs
5336 if isinstance(args, JsonRaw):
5345 if isinstance(args, JsonRaw):
5337 try:
5346 try:
5338 args = json.loads(args)
5347 args = json.loads(args)
5339 except ValueError:
5348 except ValueError:
5340 args = tuple()
5349 args = tuple()
5341
5350
5342 if isinstance(kwargs, JsonRaw):
5351 if isinstance(kwargs, JsonRaw):
5343 try:
5352 try:
5344 kwargs = json.loads(kwargs)
5353 kwargs = json.loads(kwargs)
5345 except ValueError:
5354 except ValueError:
5346 kwargs = dict()
5355 kwargs = dict()
5347
5356
5348 dot_notation = obj.task_dot_notation
5357 dot_notation = obj.task_dot_notation
5349 val = '.'.join(map(safe_str, [
5358 val = '.'.join(map(safe_str, [
5350 sorted(dot_notation), args, sorted(kwargs.items())]))
5359 sorted(dot_notation), args, sorted(kwargs.items())]))
5351 return sha1(safe_bytes(val))
5360 return sha1(safe_bytes(val))
5352
5361
5353 @classmethod
5362 @classmethod
5354 def get_by_schedule_name(cls, schedule_name):
5363 def get_by_schedule_name(cls, schedule_name):
5355 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5364 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5356
5365
5357 @classmethod
5366 @classmethod
5358 def get_by_schedule_id(cls, schedule_id):
5367 def get_by_schedule_id(cls, schedule_id):
5359 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5368 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5360
5369
5361 @property
5370 @property
5362 def task(self):
5371 def task(self):
5363 return self.task_dot_notation
5372 return self.task_dot_notation
5364
5373
5365 @property
5374 @property
5366 def schedule(self):
5375 def schedule(self):
5367 from rhodecode.lib.celerylib.utils import raw_2_schedule
5376 from rhodecode.lib.celerylib.utils import raw_2_schedule
5368 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5377 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5369 return schedule
5378 return schedule
5370
5379
5371 @property
5380 @property
5372 def args(self):
5381 def args(self):
5373 try:
5382 try:
5374 return list(self.task_args or [])
5383 return list(self.task_args or [])
5375 except ValueError:
5384 except ValueError:
5376 return list()
5385 return list()
5377
5386
5378 @property
5387 @property
5379 def kwargs(self):
5388 def kwargs(self):
5380 try:
5389 try:
5381 return dict(self.task_kwargs or {})
5390 return dict(self.task_kwargs or {})
5382 except ValueError:
5391 except ValueError:
5383 return dict()
5392 return dict()
5384
5393
5385 def _as_raw(self, val, indent=False):
5394 def _as_raw(self, val, indent=False):
5386 if hasattr(val, 'de_coerce'):
5395 if hasattr(val, 'de_coerce'):
5387 val = val.de_coerce()
5396 val = val.de_coerce()
5388 if val:
5397 if val:
5389 if indent:
5398 if indent:
5390 val = ext_json.formatted_str_json(val)
5399 val = ext_json.formatted_str_json(val)
5391 else:
5400 else:
5392 val = ext_json.str_json(val)
5401 val = ext_json.str_json(val)
5393
5402
5394 return val
5403 return val
5395
5404
5396 @property
5405 @property
5397 def schedule_definition_raw(self):
5406 def schedule_definition_raw(self):
5398 return self._as_raw(self.schedule_definition)
5407 return self._as_raw(self.schedule_definition)
5399
5408
5400 def args_raw(self, indent=False):
5409 def args_raw(self, indent=False):
5401 return self._as_raw(self.task_args, indent)
5410 return self._as_raw(self.task_args, indent)
5402
5411
5403 def kwargs_raw(self, indent=False):
5412 def kwargs_raw(self, indent=False):
5404 return self._as_raw(self.task_kwargs, indent)
5413 return self._as_raw(self.task_kwargs, indent)
5405
5414
5406 def __repr__(self):
5415 def __repr__(self):
5407 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5416 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5408
5417
5409
5418
5410 @event.listens_for(ScheduleEntry, 'before_update')
5419 @event.listens_for(ScheduleEntry, 'before_update')
5411 def update_task_uid(mapper, connection, target):
5420 def update_task_uid(mapper, connection, target):
5412 target.task_uid = ScheduleEntry.get_uid(target)
5421 target.task_uid = ScheduleEntry.get_uid(target)
5413
5422
5414
5423
5415 @event.listens_for(ScheduleEntry, 'before_insert')
5424 @event.listens_for(ScheduleEntry, 'before_insert')
5416 def set_task_uid(mapper, connection, target):
5425 def set_task_uid(mapper, connection, target):
5417 target.task_uid = ScheduleEntry.get_uid(target)
5426 target.task_uid = ScheduleEntry.get_uid(target)
5418
5427
5419
5428
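A short sketch of how the listeners above keep task_uid consistent (editor's illustration; field values are placeholders and assume a working DB session):

# Illustrative only: task_uid never needs to be set by hand; the
# before_insert/before_update listeners derive it via ScheduleEntry.get_uid().
entry = ScheduleEntry()
entry.schedule_name = 'nightly-repo-maintenance'   # hypothetical name
entry.schedule_type = ScheduleEntry.SCHEDULE_TYPE_CRONTAB
entry.schedule_definition = {'minute': '0', 'hour': '3'}  # assumed crontab-style payload
entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.some_task'  # hypothetical task path
entry.task_args = []
entry.task_kwargs = {}
Session().add(entry)
Session().commit()  # set_task_uid() fills entry.task_uid before the INSERT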
5420 class _BaseBranchPerms(BaseModel):
5429 class _BaseBranchPerms(BaseModel):
5421 @classmethod
5430 @classmethod
5422 def compute_hash(cls, value):
5431 def compute_hash(cls, value):
5423 return sha1_safe(value)
5432 return sha1_safe(value)
5424
5433
5425 @hybrid_property
5434 @hybrid_property
5426 def branch_pattern(self):
5435 def branch_pattern(self):
5427 return self._branch_pattern or '*'
5436 return self._branch_pattern or '*'
5428
5437
5429 @hybrid_property
5438 @hybrid_property
5430 def branch_hash(self):
5439 def branch_hash(self):
5431 return self._branch_hash
5440 return self._branch_hash
5432
5441
5433 def _validate_glob(self, value):
5442 def _validate_glob(self, value):
5434 re.compile('^' + glob2re(value) + '$')
5443 re.compile('^' + glob2re(value) + '$')
5435
5444
5436 @branch_pattern.setter
5445 @branch_pattern.setter
5437 def branch_pattern(self, value):
5446 def branch_pattern(self, value):
5438 self._validate_glob(value)
5447 self._validate_glob(value)
5439 self._branch_pattern = value or '*'
5448 self._branch_pattern = value or '*'
5440 # set the Hash when setting the branch pattern
5449 # set the Hash when setting the branch pattern
5441 self._branch_hash = self.compute_hash(self._branch_pattern)
5450 self._branch_hash = self.compute_hash(self._branch_pattern)
5442
5451
5443 def matches(self, branch):
5452 def matches(self, branch):
5444 """
5453 """
5445 Check if the given branch matches this entry
5454 Check if the given branch matches this entry
5446
5455
5447 :param branch: branch name for the commit
5456 :param branch: branch name for the commit
5448 """
5457 """
5449
5458
5450 branch = branch or ''
5459 branch = branch or ''
5451
5460
5452 branch_matches = True
5461 branch_matches = True
5453 if branch:
5462 if branch:
5454 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5463 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5455 branch_matches = bool(branch_regex.search(branch))
5464 branch_matches = bool(branch_regex.search(branch))
5456
5465
5457 return branch_matches
5466 return branch_matches
5458
5467
5459
5468
5460 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5469 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5461 __tablename__ = 'user_to_repo_branch_permissions'
5470 __tablename__ = 'user_to_repo_branch_permissions'
5462 __table_args__ = (
5471 __table_args__ = (
5463 base_table_args
5472 base_table_args
5464 )
5473 )
5465
5474
5466 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5475 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5467
5476
5468 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5477 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5469 repo = relationship('Repository', back_populates='user_branch_perms')
5478 repo = relationship('Repository', back_populates='user_branch_perms')
5470
5479
5471 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5480 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5472 permission = relationship('Permission')
5481 permission = relationship('Permission')
5473
5482
5474 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5483 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5475 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5484 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5476
5485
5477 rule_order = Column('rule_order', Integer(), nullable=False)
5486 rule_order = Column('rule_order', Integer(), nullable=False)
5478 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5487 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5479 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5488 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5480
5489
5481 def __repr__(self):
5490 def __repr__(self):
5482 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5491 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5483
5492
5484
5493
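A brief sketch of the glob-based branch matching provided by the mixin above (editor's illustration; glob2re is already imported by this module):

# Illustrative only: branch_pattern is a glob; the setter validates it and
# stores its hash, matches() compiles it to '^<regex>$' and tests the branch.
perm = UserToRepoBranchPermission()
perm.branch_pattern = 'release/*'
assert perm.matches('release/1.2.x')
assert not perm.matches('feature/login')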
5485 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5494 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5486 __tablename__ = 'user_group_to_repo_branch_permissions'
5495 __tablename__ = 'user_group_to_repo_branch_permissions'
5487 __table_args__ = (
5496 __table_args__ = (
5488 base_table_args
5497 base_table_args
5489 )
5498 )
5490
5499
5491 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5500 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5492
5501
5493 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5502 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5494 repo = relationship('Repository', back_populates='user_group_branch_perms')
5503 repo = relationship('Repository', back_populates='user_group_branch_perms')
5495
5504
5496 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5505 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5497 permission = relationship('Permission')
5506 permission = relationship('Permission')
5498
5507
5499 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5508 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5500 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5509 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5501
5510
5502 rule_order = Column('rule_order', Integer(), nullable=False)
5511 rule_order = Column('rule_order', Integer(), nullable=False)
5503 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5512 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5504 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5513 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5505
5514
5506 def __repr__(self):
5515 def __repr__(self):
5507 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5516 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5508
5517
5509
5518
5510 class UserBookmark(Base, BaseModel):
5519 class UserBookmark(Base, BaseModel):
5511 __tablename__ = 'user_bookmarks'
5520 __tablename__ = 'user_bookmarks'
5512 __table_args__ = (
5521 __table_args__ = (
5513 UniqueConstraint('user_id', 'bookmark_repo_id'),
5522 UniqueConstraint('user_id', 'bookmark_repo_id'),
5514 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5523 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5515 UniqueConstraint('user_id', 'bookmark_position'),
5524 UniqueConstraint('user_id', 'bookmark_position'),
5516 base_table_args
5525 base_table_args
5517 )
5526 )
5518
5527
5519 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5528 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5520 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5529 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5521 position = Column("bookmark_position", Integer(), nullable=False)
5530 position = Column("bookmark_position", Integer(), nullable=False)
5522 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5531 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5523 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5532 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5524 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5533 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5525
5534
5526 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5535 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5527 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5536 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5528
5537
5529 user = relationship("User")
5538 user = relationship("User")
5530
5539
5531 repository = relationship("Repository")
5540 repository = relationship("Repository")
5532 repository_group = relationship("RepoGroup")
5541 repository_group = relationship("RepoGroup")
5533
5542
5534 @classmethod
5543 @classmethod
5535 def get_by_position_for_user(cls, position, user_id):
5544 def get_by_position_for_user(cls, position, user_id):
5536 return cls.query() \
5545 return cls.query() \
5537 .filter(UserBookmark.user_id == user_id) \
5546 .filter(UserBookmark.user_id == user_id) \
5538 .filter(UserBookmark.position == position).scalar()
5547 .filter(UserBookmark.position == position).scalar()
5539
5548
5540 @classmethod
5549 @classmethod
5541 def get_bookmarks_for_user(cls, user_id, cache=True):
5550 def get_bookmarks_for_user(cls, user_id, cache=True):
5542 bookmarks = cls.query() \
5551 bookmarks = cls.query() \
5543 .filter(UserBookmark.user_id == user_id) \
5552 .filter(UserBookmark.user_id == user_id) \
5544 .options(joinedload(UserBookmark.repository)) \
5553 .options(joinedload(UserBookmark.repository)) \
5545 .options(joinedload(UserBookmark.repository_group)) \
5554 .options(joinedload(UserBookmark.repository_group)) \
5546 .order_by(UserBookmark.position.asc())
5555 .order_by(UserBookmark.position.asc())
5547
5556
5548 if cache:
5557 if cache:
5549 bookmarks = bookmarks.options(
5558 bookmarks = bookmarks.options(
5550 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5559 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5551 )
5560 )
5552
5561
5553 return bookmarks.all()
5562 return bookmarks.all()
5554
5563
5555 def __repr__(self):
5564 def __repr__(self):
5556 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5565 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5557
5566
5558
5567
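A brief sketch of the cached bookmark lookup above (editor's illustration; the FromCache option and the 'sql_cache_short' region are assumed to be configured as elsewhere in RhodeCode):

# Illustrative only: with cache=True the query result is served from the
# dogpile 'sql_cache_short' region keyed per user; cache=False always hits the DB.
bookmarks = UserBookmark.get_bookmarks_for_user(user_id=2, cache=True)
for bm in bookmarks:
    target = bm.repository or bm.repository_group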
5559 class FileStore(Base, BaseModel):
5568 class FileStore(Base, BaseModel):
5560 __tablename__ = 'file_store'
5569 __tablename__ = 'file_store'
5561 __table_args__ = (
5570 __table_args__ = (
5562 base_table_args
5571 base_table_args
5563 )
5572 )
5564
5573
5565 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5574 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5566 file_uid = Column('file_uid', String(1024), nullable=False)
5575 file_uid = Column('file_uid', String(1024), nullable=False)
5567 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5576 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5568 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5577 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5569 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5578 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5570
5579
5571 # sha256 hash
5580 # sha256 hash
5572 file_hash = Column('file_hash', String(512), nullable=False)
5581 file_hash = Column('file_hash', String(512), nullable=False)
5573 file_size = Column('file_size', BigInteger(), nullable=False)
5582 file_size = Column('file_size', BigInteger(), nullable=False)
5574
5583
5575 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5584 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5576 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5585 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5577 accessed_count = Column('accessed_count', Integer(), default=0)
5586 accessed_count = Column('accessed_count', Integer(), default=0)
5578
5587
5579 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5588 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5580
5589
5581 # if repo/repo_group reference is set, check for permissions
5590 # if repo/repo_group reference is set, check for permissions
5582 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5591 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5583
5592
5584 # hidden marks an attachment that should not be shown in the artifact listing
5593 # hidden marks an attachment that should not be shown in the artifact listing
5585 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5594 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5586
5595
5587 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5596 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5588 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5597 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5589
5598
5590 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5599 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5591
5600
5592 # scope limited to a user which the requester has access to
5601 # scope limited to a user which the requester has access to
5593 scope_user_id = Column(
5602 scope_user_id = Column(
5594 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5603 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5595 nullable=True, unique=None, default=None)
5604 nullable=True, unique=None, default=None)
5596 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5605 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5597
5606
5598 # scope limited to a user group which the requester has access to
5607 # scope limited to a user group which the requester has access to
5599 scope_user_group_id = Column(
5608 scope_user_group_id = Column(
5600 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5609 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5601 nullable=True, unique=None, default=None)
5610 nullable=True, unique=None, default=None)
5602 user_group = relationship('UserGroup', lazy='joined')
5611 user_group = relationship('UserGroup', lazy='joined')
5603
5612
5604 # scope limited to a repo which the requester has access to
5613 # scope limited to a repo which the requester has access to
5605 scope_repo_id = Column(
5614 scope_repo_id = Column(
5606 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5615 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5607 nullable=True, unique=None, default=None)
5616 nullable=True, unique=None, default=None)
5608 repo = relationship('Repository', lazy='joined')
5617 repo = relationship('Repository', lazy='joined')
5609
5618
5610 # scope limited to a repo group which the requester has access to
5619 # scope limited to a repo group which the requester has access to
5611 scope_repo_group_id = Column(
5620 scope_repo_group_id = Column(
5612 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5621 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5613 nullable=True, unique=None, default=None)
5622 nullable=True, unique=None, default=None)
5614 repo_group = relationship('RepoGroup', lazy='joined')
5623 repo_group = relationship('RepoGroup', lazy='joined')
5615
5624
5616 @classmethod
5625 @classmethod
5617 def get_scope(cls, scope_type, scope_id):
5626 def get_scope(cls, scope_type, scope_id):
5618 if scope_type == 'repo':
5627 if scope_type == 'repo':
5619 return f'repo:{scope_id}'
5628 return f'repo:{scope_id}'
5620 elif scope_type == 'repo-group':
5629 elif scope_type == 'repo-group':
5621 return f'repo-group:{scope_id}'
5630 return f'repo-group:{scope_id}'
5622 elif scope_type == 'user':
5631 elif scope_type == 'user':
5623 return f'user:{scope_id}'
5632 return f'user:{scope_id}'
5624 elif scope_type == 'user-group':
5633 elif scope_type == 'user-group':
5625 return f'user-group:{scope_id}'
5634 return f'user-group:{scope_id}'
5626 else:
5635 else:
5627 return scope_type
5636 return scope_type
5628
5637
5629 @classmethod
5638 @classmethod
5630 def get_by_store_uid(cls, file_store_uid, safe=False):
5639 def get_by_store_uid(cls, file_store_uid, safe=False):
5631 if safe:
5640 if safe:
5632 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5641 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5633 else:
5642 else:
5634 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5643 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5635
5644
5636 @classmethod
5645 @classmethod
5637 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5646 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5638 file_description='', enabled=True, hidden=False, check_acl=True,
5647 file_description='', enabled=True, hidden=False, check_acl=True,
5639 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5648 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5640
5649
5641 store_entry = FileStore()
5650 store_entry = FileStore()
5642 store_entry.file_uid = file_uid
5651 store_entry.file_uid = file_uid
5643 store_entry.file_display_name = file_display_name
5652 store_entry.file_display_name = file_display_name
5644 store_entry.file_org_name = filename
5653 store_entry.file_org_name = filename
5645 store_entry.file_size = file_size
5654 store_entry.file_size = file_size
5646 store_entry.file_hash = file_hash
5655 store_entry.file_hash = file_hash
5647 store_entry.file_description = file_description
5656 store_entry.file_description = file_description
5648
5657
5649 store_entry.check_acl = check_acl
5658 store_entry.check_acl = check_acl
5650 store_entry.enabled = enabled
5659 store_entry.enabled = enabled
5651 store_entry.hidden = hidden
5660 store_entry.hidden = hidden
5652
5661
5653 store_entry.user_id = user_id
5662 store_entry.user_id = user_id
5654 store_entry.scope_user_id = scope_user_id
5663 store_entry.scope_user_id = scope_user_id
5655 store_entry.scope_repo_id = scope_repo_id
5664 store_entry.scope_repo_id = scope_repo_id
5656 store_entry.scope_repo_group_id = scope_repo_group_id
5665 store_entry.scope_repo_group_id = scope_repo_group_id
5657
5666
5658 return store_entry
5667 return store_entry
5659
5668
5660 @classmethod
5669 @classmethod
5661 def store_metadata(cls, file_store_id, args, commit=True):
5670 def store_metadata(cls, file_store_id, args, commit=True):
5662 file_store = FileStore.get(file_store_id)
5671 file_store = FileStore.get(file_store_id)
5663 if file_store is None:
5672 if file_store is None:
5664 return
5673 return
5665
5674
5666 for section, key, value, value_type in args:
5675 for section, key, value, value_type in args:
5667 has_key = FileStoreMetadata().query() \
5676 has_key = FileStoreMetadata().query() \
5668 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5677 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5669 .filter(FileStoreMetadata.file_store_meta_section == section) \
5678 .filter(FileStoreMetadata.file_store_meta_section == section) \
5670 .filter(FileStoreMetadata.file_store_meta_key == key) \
5679 .filter(FileStoreMetadata.file_store_meta_key == key) \
5671 .scalar()
5680 .scalar()
5672 if has_key:
5681 if has_key:
5673 msg = 'key `{}` already defined under section `{}` for this file.'\
5682 msg = 'key `{}` already defined under section `{}` for this file.'\
5674 .format(key, section)
5683 .format(key, section)
5675 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5684 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5676
5685
5677 # NOTE(marcink): raises ArtifactMetadataBadValueType
5686 # NOTE(marcink): raises ArtifactMetadataBadValueType
5678 FileStoreMetadata.valid_value_type(value_type)
5687 FileStoreMetadata.valid_value_type(value_type)
5679
5688
5680 meta_entry = FileStoreMetadata()
5689 meta_entry = FileStoreMetadata()
5681 meta_entry.file_store = file_store
5690 meta_entry.file_store = file_store
5682 meta_entry.file_store_meta_section = section
5691 meta_entry.file_store_meta_section = section
5683 meta_entry.file_store_meta_key = key
5692 meta_entry.file_store_meta_key = key
5684 meta_entry.file_store_meta_value_type = value_type
5693 meta_entry.file_store_meta_value_type = value_type
5685 meta_entry.file_store_meta_value = value
5694 meta_entry.file_store_meta_value = value
5686
5695
5687 Session().add(meta_entry)
5696 Session().add(meta_entry)
5688
5697
5689 try:
5698 try:
5690 if commit:
5699 if commit:
5691 Session().commit()
5700 Session().commit()
5692 except IntegrityError:
5701 except IntegrityError:
5693 Session().rollback()
5702 Session().rollback()
5694 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5703 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5695
5704
5696 @classmethod
5705 @classmethod
5697 def bump_access_counter(cls, file_uid, commit=True):
5706 def bump_access_counter(cls, file_uid, commit=True):
5698 FileStore().query()\
5707 FileStore().query()\
5699 .filter(FileStore.file_uid == file_uid)\
5708 .filter(FileStore.file_uid == file_uid)\
5700 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5709 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5701 FileStore.accessed_on: datetime.datetime.now()})
5710 FileStore.accessed_on: datetime.datetime.now()})
5702 if commit:
5711 if commit:
5703 Session().commit()
5712 Session().commit()
5704
5713
5705 def __json__(self):
5714 def __json__(self):
5706 data = {
5715 data = {
5707 'filename': self.file_display_name,
5716 'filename': self.file_display_name,
5708 'filename_org': self.file_org_name,
5717 'filename_org': self.file_org_name,
5709 'file_uid': self.file_uid,
5718 'file_uid': self.file_uid,
5710 'description': self.file_description,
5719 'description': self.file_description,
5711 'hidden': self.hidden,
5720 'hidden': self.hidden,
5712 'size': self.file_size,
5721 'size': self.file_size,
5713 'created_on': self.created_on,
5722 'created_on': self.created_on,
5714 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5723 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5715 'downloaded_times': self.accessed_count,
5724 'downloaded_times': self.accessed_count,
5716 'sha256': self.file_hash,
5725 'sha256': self.file_hash,
5717 'metadata': self.file_metadata,
5726 'metadata': self.file_metadata,
5718 }
5727 }
5719
5728
5720 return data
5729 return data
5721
5730
5722 def __repr__(self):
5731 def __repr__(self):
5723 return f'<FileStore({self.file_store_id})>'
5732 return f'<FileStore({self.file_store_id})>'
5724
5733
5725
5734
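A condensed sketch of creating an artifact record and attaching metadata (editor's illustration; file_uid and hash values are placeholders and the surrounding upload handling is omitted):

# Illustrative only: create() builds the row, store_metadata() attaches typed entries.
store_entry = FileStore.create(
    file_uid='abcd1234-placeholder',     # hypothetical uid from the storage backend
    filename='build.log',
    file_hash='0' * 64,                  # placeholder sha256
    file_size=1024,
    user_id=2,
)
Session().add(store_entry)
Session().commit()
FileStore.store_metadata(
    store_entry.file_store_id,
    [('build', 'ci_run', '42', 'int')],  # (section, key, value, value_type)
)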
5726 class FileStoreMetadata(Base, BaseModel):
5735 class FileStoreMetadata(Base, BaseModel):
5727 __tablename__ = 'file_store_metadata'
5736 __tablename__ = 'file_store_metadata'
5728 __table_args__ = (
5737 __table_args__ = (
5729 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5738 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5730 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5739 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5731 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5740 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5732 base_table_args
5741 base_table_args
5733 )
5742 )
5734 SETTINGS_TYPES = {
5743 SETTINGS_TYPES = {
5735 'str': safe_str,
5744 'str': safe_str,
5736 'int': safe_int,
5745 'int': safe_int,
5737 'unicode': safe_str,
5746 'unicode': safe_str,
5738 'bool': str2bool,
5747 'bool': str2bool,
5739 'list': functools.partial(aslist, sep=',')
5748 'list': functools.partial(aslist, sep=',')
5740 }
5749 }
5741
5750
5742 file_store_meta_id = Column(
5751 file_store_meta_id = Column(
5743 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5752 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5744 primary_key=True)
5753 primary_key=True)
5745 _file_store_meta_section = Column(
5754 _file_store_meta_section = Column(
5746 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5755 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5747 nullable=True, unique=None, default=None)
5756 nullable=True, unique=None, default=None)
5748 _file_store_meta_section_hash = Column(
5757 _file_store_meta_section_hash = Column(
5749 "file_store_meta_section_hash", String(255),
5758 "file_store_meta_section_hash", String(255),
5750 nullable=True, unique=None, default=None)
5759 nullable=True, unique=None, default=None)
5751 _file_store_meta_key = Column(
5760 _file_store_meta_key = Column(
5752 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5761 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5753 nullable=True, unique=None, default=None)
5762 nullable=True, unique=None, default=None)
5754 _file_store_meta_key_hash = Column(
5763 _file_store_meta_key_hash = Column(
5755 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5764 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5756 _file_store_meta_value = Column(
5765 _file_store_meta_value = Column(
5757 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5766 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5758 nullable=True, unique=None, default=None)
5767 nullable=True, unique=None, default=None)
5759 _file_store_meta_value_type = Column(
5768 _file_store_meta_value_type = Column(
5760 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5769 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5761 default='unicode')
5770 default='unicode')
5762
5771
5763 file_store_id = Column(
5772 file_store_id = Column(
5764 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5773 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5765 nullable=True, unique=None, default=None)
5774 nullable=True, unique=None, default=None)
5766
5775
5767 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5776 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5768
5777
5769 @classmethod
5778 @classmethod
5770 def valid_value_type(cls, value):
5779 def valid_value_type(cls, value):
5771 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5780 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5772 raise ArtifactMetadataBadValueType(
5781 raise ArtifactMetadataBadValueType(
5773 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5782 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5774
5783
5775 @hybrid_property
5784 @hybrid_property
5776 def file_store_meta_section(self):
5785 def file_store_meta_section(self):
5777 return self._file_store_meta_section
5786 return self._file_store_meta_section
5778
5787
5779 @file_store_meta_section.setter
5788 @file_store_meta_section.setter
5780 def file_store_meta_section(self, value):
5789 def file_store_meta_section(self, value):
5781 self._file_store_meta_section = value
5790 self._file_store_meta_section = value
5782 self._file_store_meta_section_hash = _hash_key(value)
5791 self._file_store_meta_section_hash = _hash_key(value)
5783
5792
5784 @hybrid_property
5793 @hybrid_property
5785 def file_store_meta_key(self):
5794 def file_store_meta_key(self):
5786 return self._file_store_meta_key
5795 return self._file_store_meta_key
5787
5796
5788 @file_store_meta_key.setter
5797 @file_store_meta_key.setter
5789 def file_store_meta_key(self, value):
5798 def file_store_meta_key(self, value):
5790 self._file_store_meta_key = value
5799 self._file_store_meta_key = value
5791 self._file_store_meta_key_hash = _hash_key(value)
5800 self._file_store_meta_key_hash = _hash_key(value)
5792
5801
5793 @hybrid_property
5802 @hybrid_property
5794 def file_store_meta_value(self):
5803 def file_store_meta_value(self):
5795 val = self._file_store_meta_value
5804 val = self._file_store_meta_value
5796
5805
5797 if self._file_store_meta_value_type:
5806 if self._file_store_meta_value_type:
5798 # e.g. unicode.encrypted == unicode
5807 # e.g. unicode.encrypted == unicode
5799 _type = self._file_store_meta_value_type.split('.')[0]
5808 _type = self._file_store_meta_value_type.split('.')[0]
5800 # decode the encrypted value if it's encrypted field type
5809 # decode the encrypted value if it's encrypted field type
5801 if '.encrypted' in self._file_store_meta_value_type:
5810 if '.encrypted' in self._file_store_meta_value_type:
5802 cipher = EncryptedTextValue()
5811 cipher = EncryptedTextValue()
5803 val = safe_str(cipher.process_result_value(val, None))
5812 val = safe_str(cipher.process_result_value(val, None))
5804 # do final type conversion
5813 # do final type conversion
5805 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5814 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5806 val = converter(val)
5815 val = converter(val)
5807
5816
5808 return val
5817 return val
5809
5818
5810 @file_store_meta_value.setter
5819 @file_store_meta_value.setter
5811 def file_store_meta_value(self, val):
5820 def file_store_meta_value(self, val):
5812 val = safe_str(val)
5821 val = safe_str(val)
5813 # encode the encrypted value
5822 # encode the encrypted value
5814 if '.encrypted' in self.file_store_meta_value_type:
5823 if '.encrypted' in self.file_store_meta_value_type:
5815 cipher = EncryptedTextValue()
5824 cipher = EncryptedTextValue()
5816 val = safe_str(cipher.process_bind_param(val, None))
5825 val = safe_str(cipher.process_bind_param(val, None))
5817 self._file_store_meta_value = val
5826 self._file_store_meta_value = val
5818
5827
5819 @hybrid_property
5828 @hybrid_property
5820 def file_store_meta_value_type(self):
5829 def file_store_meta_value_type(self):
5821 return self._file_store_meta_value_type
5830 return self._file_store_meta_value_type
5822
5831
5823 @file_store_meta_value_type.setter
5832 @file_store_meta_value_type.setter
5824 def file_store_meta_value_type(self, val):
5833 def file_store_meta_value_type(self, val):
5825 # e.g. unicode.encrypted
5834 # e.g. unicode.encrypted
5826 self.valid_value_type(val)
5835 self.valid_value_type(val)
5827 self._file_store_meta_value_type = val
5836 self._file_store_meta_value_type = val
5828
5837
5829 def __json__(self):
5838 def __json__(self):
5830 data = {
5839 data = {
5831 'artifact': self.file_store.file_uid,
5840 'artifact': self.file_store.file_uid,
5832 'section': self.file_store_meta_section,
5841 'section': self.file_store_meta_section,
5833 'key': self.file_store_meta_key,
5842 'key': self.file_store_meta_key,
5834 'value': self.file_store_meta_value,
5843 'value': self.file_store_meta_value,
5835 }
5844 }
5836
5845
5837 return data
5846 return data
5838
5847
5839 def __repr__(self):
5848 def __repr__(self):
5840 return '<%s[%s] %s=>%s>' % (self.cls_name, self.file_store_meta_section,
5849 return '<%s[%s] %s=>%s>' % (self.cls_name, self.file_store_meta_section,
5841 self.file_store_meta_key, self.file_store_meta_value)
5850 self.file_store_meta_key, self.file_store_meta_value)
5842
5851
5843
5852
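A small sketch of how the hybrid value properties above round-trip typed values (editor's illustration; section/key names are placeholders):

# Illustrative only: the value is stored as text and converted back according to
# file_store_meta_value_type; '.encrypted' variants are transparently (de)crypted.
meta = FileStoreMetadata()
meta.file_store_meta_section = 'build'
meta.file_store_meta_key = 'ci_run'
meta.file_store_meta_value_type = 'int'   # must be one of SETTINGS_TYPES
meta.file_store_meta_value = 42           # stored via safe_str(), read back as int
assert meta.file_store_meta_value == 42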
5844 class DbMigrateVersion(Base, BaseModel):
5853 class DbMigrateVersion(Base, BaseModel):
5845 __tablename__ = 'db_migrate_version'
5854 __tablename__ = 'db_migrate_version'
5846 __table_args__ = (
5855 __table_args__ = (
5847 base_table_args,
5856 base_table_args,
5848 )
5857 )
5849
5858
5850 repository_id = Column('repository_id', String(250), primary_key=True)
5859 repository_id = Column('repository_id', String(250), primary_key=True)
5851 repository_path = Column('repository_path', Text)
5860 repository_path = Column('repository_path', Text)
5852 version = Column('version', Integer)
5861 version = Column('version', Integer)
5853
5862
5854 @classmethod
5863 @classmethod
5855 def set_version(cls, version):
5864 def set_version(cls, version):
5856 """
5865 """
5857 Helper for forcing a different version, usually for debugging purposes via ishell.
5866 Helper for forcing a different version, usually for debugging purposes via ishell.
5858 """
5867 """
5859 ver = DbMigrateVersion.query().first()
5868 ver = DbMigrateVersion.query().first()
5860 ver.version = version
5869 ver.version = version
5861 Session().commit()
5870 Session().commit()
5862
5871
5863
5872
5864 class DbSession(Base, BaseModel):
5873 class DbSession(Base, BaseModel):
5865 __tablename__ = 'db_session'
5874 __tablename__ = 'db_session'
5866 __table_args__ = (
5875 __table_args__ = (
5867 base_table_args,
5876 base_table_args,
5868 )
5877 )
5869
5878
5870 def __repr__(self):
5879 def __repr__(self):
5871 return f'<DB:DbSession({self.id})>'
5880 return f'<DB:DbSession({self.id})>'
5872
5881
5873 id = Column('id', Integer())
5882 id = Column('id', Integer())
5874 namespace = Column('namespace', String(255), primary_key=True)
5883 namespace = Column('namespace', String(255), primary_key=True)
5875 accessed = Column('accessed', DateTime, nullable=False)
5884 accessed = Column('accessed', DateTime, nullable=False)
5876 created = Column('created', DateTime, nullable=False)
5885 created = Column('created', DateTime, nullable=False)
5877 data = Column('data', PickleType, nullable=False)
5886 data = Column('data', PickleType, nullable=False)
@@ -1,1063 +1,1062 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Scm model for RhodeCode
20 Scm model for RhodeCode
21 """
21 """
22
22
23 import os.path
23 import os.path
24 import traceback
24 import traceback
25 import logging
25 import logging
26 import io
26 import io
27
27
28 from sqlalchemy import func
28 from sqlalchemy import func
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
43 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
44 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.str_utils import safe_str
45 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 or_, false, null,
49 or_, false, null,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest, FileStore)
51 PullRequest, FileStore)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72
72
73
73
74 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
75 """
75 """
76 Lighter version of iteration of repos without the scm initialisation,
76 Lighter version of iteration of repos without the scm initialisation,
77 and with cache usage
77 and with cache usage
78 """
78 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
84 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
86 'repository.admin']
87 self.perm_set = perm_set
87 self.perm_set = perm_set
88
88
89 def __len__(self):
89 def __len__(self):
90 return len(self.db_repo_list)
90 return len(self.db_repo_list)
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
94
94
95 def __iter__(self):
95 def __iter__(self):
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 # check permission at this level
97 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
100 if not has_perm:
101 continue
101 continue
102
102
103 tmp_d = {
103 tmp_d = {
104 'name': dbr.repo_name,
104 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
107 }
108 yield tmp_d
108 yield tmp_d
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates an iterator from a given list of objects, additionally
117 Creates an iterator from a given list of objects, additionally
118 checking permissions for them against the perm_set var
118 checking permissions for them against the perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
139 # check permission at this level
139 # check permission at this level
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
148 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
149
149
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
151 if not perm_set:
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153
153
154 super().__init__(
154 super().__init__(
155 obj_list=db_repo_list,
155 obj_list=db_repo_list,
156 obj_attr='_repo_name', perm_set=perm_set,
156 obj_attr='_repo_name', perm_set=perm_set,
157 perm_checker=HasRepoPermissionAny,
157 perm_checker=HasRepoPermissionAny,
158 extra_kwargs=extra_kwargs)
158 extra_kwargs=extra_kwargs)
159
159
160
160
161 class RepoGroupList(_PermCheckIterator):
161 class RepoGroupList(_PermCheckIterator):
162
162
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 if not perm_set:
164 if not perm_set:
165 perm_set = ['group.read', 'group.write', 'group.admin']
165 perm_set = ['group.read', 'group.write', 'group.admin']
166
166
167 super().__init__(
167 super().__init__(
168 obj_list=db_repo_group_list,
168 obj_list=db_repo_group_list,
169 obj_attr='_group_name', perm_set=perm_set,
169 obj_attr='_group_name', perm_set=perm_set,
170 perm_checker=HasRepoGroupPermissionAny,
170 perm_checker=HasRepoGroupPermissionAny,
171 extra_kwargs=extra_kwargs)
171 extra_kwargs=extra_kwargs)
172
172
173
173
174 class UserGroupList(_PermCheckIterator):
174 class UserGroupList(_PermCheckIterator):
175
175
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 if not perm_set:
177 if not perm_set:
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179
179
180 super().__init__(
180 super().__init__(
181 obj_list=db_user_group_list,
181 obj_list=db_user_group_list,
182 obj_attr='users_group_name', perm_set=perm_set,
182 obj_attr='users_group_name', perm_set=perm_set,
183 perm_checker=HasUserGroupPermissionAny,
183 perm_checker=HasUserGroupPermissionAny,
184 extra_kwargs=extra_kwargs)
184 extra_kwargs=extra_kwargs)
185
185
186
186
187 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
188 """
188 """
189 Generic Scm Model
189 Generic Scm Model
190 """
190 """
191
191
192 @LazyProperty
192 @LazyProperty
193 def repos_path(self):
193 def repos_path(self):
194 """
194 """
195 Gets the repositories root path from database
195 Gets the repositories root path from database
196 """
196 """
197
197
198 settings_model = VcsSettingsModel(sa=self.sa)
198 settings_model = VcsSettingsModel(sa=self.sa)
199 return settings_model.get_repos_location()
199 return settings_model.get_repos_location()
200
200
201 def repo_scan(self, repos_path=None):
201 def repo_scan(self, repos_path=None):
202 """
202 """
203 Listing of repositories in the given path. This path should not be a
203 Listing of repositories in the given path. This path should not be a
204 repository itself. Return a dictionary of repository objects
204 repository itself. Return a dictionary of repository objects
205
205
206 :param repos_path: path to directory containing repositories
206 :param repos_path: path to directory containing repositories
207 """
207 """
208
208
209 if repos_path is None:
209 if repos_path is None:
210 repos_path = self.repos_path
210 repos_path = self.repos_path
211
211
212 log.info('scanning for repositories in %s', repos_path)
212 log.info('scanning for repositories in %s', repos_path)
213
213
214 config = make_db_config()
214 config = make_db_config()
215 config.set('extensions', 'largefiles', '')
215 config.set('extensions', 'largefiles', '')
216 repos = {}
216 repos = {}
217
217
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 # names need to be decomposed and put back together using the /
219 # names need to be decomposed and put back together using the /
220 # since this is the internal storage separator for rhodecode
220 # since this is the internal storage separator for rhodecode
221 name = Repository.normalize_repo_name(name)
221 name = Repository.normalize_repo_name(name)
222
222
223 try:
223 try:
224 if name in repos:
224 if name in repos:
225 raise RepositoryError('Duplicate repository name %s '
225 raise RepositoryError('Duplicate repository name %s '
226 'found in %s' % (name, path))
226 'found in %s' % (name, path))
227 elif path[0] in rhodecode.BACKENDS:
227 elif path[0] in rhodecode.BACKENDS:
228 backend = get_backend(path[0])
228 backend = get_backend(path[0])
229 repos[name] = backend(path[1], config=config,
229 repos[name] = backend(path[1], config=config,
230 with_wire={"cache": False})
230 with_wire={"cache": False})
231 except OSError:
231 except OSError:
232 continue
232 continue
233 except RepositoryError:
233 except RepositoryError:
234 log.exception('Failed to create a repo')
234 log.exception('Failed to create a repo')
235 continue
235 continue
236
236
237 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
238 return repos
238 return repos
239
239
240 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
241 """
241 """
242 Get all repositories from db and for each repo create its
242 Get all repositories from db and for each repo create its
243 backend instance and fill that backend with information from the database
243 backend instance and fill that backend with information from the database
244
244
245 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
246 give a specific list of repositories, useful for filtering
246 give a specific list of repositories, useful for filtering
247
247
248 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
249 """
249 """
250 if all_repos is None:
250 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == null())\
252 .filter(Repository.group_id == null())\
253 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
256 return repo_iter
257
257
258 @staticmethod
258 @staticmethod
259 def get_parent_commits(parent_commit, scm_instance):
259 def get_parent_commits(parent_commit, scm_instance):
260 if not parent_commit:
260 if not parent_commit:
261 parent_commit = EmptyCommit(alias=scm_instance.alias)
261 parent_commit = EmptyCommit(alias=scm_instance.alias)
262
262
263 if isinstance(parent_commit, EmptyCommit):
263 if isinstance(parent_commit, EmptyCommit):
264 # EmptyCommit means we're editing an empty repository
264 # EmptyCommit means we're editing an empty repository
265 parents = None
265 parents = None
266 else:
266 else:
267 parents = [parent_commit]
267 parents = [parent_commit]
268 return parent_commit, parents
268 return parent_commit, parents
269
269
270 def initialize_inmemory_vars(self, user, repo, message, author):
270 def initialize_inmemory_vars(self, user, repo, message, author):
271 """
271 """
272 Initialize node specific objects for further usage
272 Initialize node specific objects for further usage
273 """
273 """
274 user = self._get_user(user)
274 user = self._get_user(user)
275 scm_instance = repo.scm_instance(cache=False)
275 scm_instance = repo.scm_instance(cache=False)
276 message = safe_str(message)
276 message = safe_str(message)
277 commiter = user.full_contact
277 commiter = user.full_contact
278 author = safe_str(author) if author else commiter
278 author = safe_str(author) if author else commiter
279 imc = scm_instance.in_memory_commit
279 imc = scm_instance.in_memory_commit
280
280
281 return user, scm_instance, message, commiter, author, imc
281 return user, scm_instance, message, commiter, author, imc
282
282
283 def get_repo_groups(self, all_groups=None):
283 def get_repo_groups(self, all_groups=None):
284 if all_groups is None:
284 if all_groups is None:
285 all_groups = RepoGroup.query()\
285 all_groups = RepoGroup.query()\
286 .filter(RepoGroup.group_parent_id == null()).all()
286 .filter(RepoGroup.group_parent_id == null()).all()
287 return [x for x in RepoGroupList(all_groups)]
287 return [x for x in RepoGroupList(all_groups)]
288
288
289 def mark_for_invalidation(self, repo_name, delete=False):
289 def mark_for_invalidation(self, repo_name, delete=False):
290 """
290 """
291 Mark caches of this repo invalid in the database. `delete` flag
291 Mark caches of this repo invalid in the database. `delete` flag
292 removes the cache entries
292 removes the cache entries
293
293
294 :param repo_name: the repo_name for which caches should be marked
294 :param repo_name: the repo_name for which caches should be marked
295 invalid, or deleted
295 invalid, or deleted
296 :param delete: delete the entry keys instead of setting bool
296 :param delete: delete the entry keys instead of setting bool
297 flag on them, and also purge caches used by the dogpile
297 flag on them, and also purge caches used by the dogpile
298 """
298 """
299 repo = Repository.get_by_repo_name(repo_name)
299 repo = Repository.get_by_repo_name(repo_name)
300
300
301 if repo:
301 if repo:
302 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
302 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
303 repo_id=repo.repo_id)
303 CacheKey.set_invalidate(repo_namespace_key, delete=delete)
304 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
305
304
306 repo_id = repo.repo_id
305 repo_id = repo.repo_id
307 config = repo._config
306 config = repo._config
308 config.set('extensions', 'largefiles', '')
307 config.set('extensions', 'largefiles', '')
309 repo.update_commit_cache(config=config, cs_cache=None)
308 repo.update_commit_cache(config=config, cs_cache=None)
310 if delete:
309 if delete:
311 cache_namespace_uid = f'cache_repo.{repo_id}'
310 cache_namespace_uid = f'cache_repo.{repo_id}'
312 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
311 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
313
312
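For illustration, a minimal caller-side sketch of the refactored invalidation call above; the ScmModel construction and the repository name are placeholder assumptions, not part of the change:

# hypothetical usage sketch (names are illustrative)
from rhodecode.model.scm import ScmModel

scm = ScmModel()
# flips the invalidation flag on the repo's CacheKey namespace
scm.mark_for_invalidation('some/repo')
# removes the cache entries and purges the dogpile 'cache_repo' namespace as well
scm.mark_for_invalidation('some/repo', delete=True)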
314 def toggle_following_repo(self, follow_repo_id, user_id):
313 def toggle_following_repo(self, follow_repo_id, user_id):
315
314
316 f = self.sa.query(UserFollowing)\
315 f = self.sa.query(UserFollowing)\
317 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
316 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
318 .filter(UserFollowing.user_id == user_id).scalar()
317 .filter(UserFollowing.user_id == user_id).scalar()
319
318
320 if f is not None:
319 if f is not None:
321 try:
320 try:
322 self.sa.delete(f)
321 self.sa.delete(f)
323 return
322 return
324 except Exception:
323 except Exception:
325 log.error(traceback.format_exc())
324 log.error(traceback.format_exc())
326 raise
325 raise
327
326
328 try:
327 try:
329 f = UserFollowing()
328 f = UserFollowing()
330 f.user_id = user_id
329 f.user_id = user_id
331 f.follows_repo_id = follow_repo_id
330 f.follows_repo_id = follow_repo_id
332 self.sa.add(f)
331 self.sa.add(f)
333 except Exception:
332 except Exception:
334 log.error(traceback.format_exc())
333 log.error(traceback.format_exc())
335 raise
334 raise
336
335
337 def toggle_following_user(self, follow_user_id, user_id):
336 def toggle_following_user(self, follow_user_id, user_id):
338 f = self.sa.query(UserFollowing)\
337 f = self.sa.query(UserFollowing)\
339 .filter(UserFollowing.follows_user_id == follow_user_id)\
338 .filter(UserFollowing.follows_user_id == follow_user_id)\
340 .filter(UserFollowing.user_id == user_id).scalar()
339 .filter(UserFollowing.user_id == user_id).scalar()
341
340
342 if f is not None:
341 if f is not None:
343 try:
342 try:
344 self.sa.delete(f)
343 self.sa.delete(f)
345 return
344 return
346 except Exception:
345 except Exception:
347 log.error(traceback.format_exc())
346 log.error(traceback.format_exc())
348 raise
347 raise
349
348
350 try:
349 try:
351 f = UserFollowing()
350 f = UserFollowing()
352 f.user_id = user_id
351 f.user_id = user_id
353 f.follows_user_id = follow_user_id
352 f.follows_user_id = follow_user_id
354 self.sa.add(f)
353 self.sa.add(f)
355 except Exception:
354 except Exception:
356 log.error(traceback.format_exc())
355 log.error(traceback.format_exc())
357 raise
356 raise
358
357
359 def is_following_repo(self, repo_name, user_id, cache=False):
358 def is_following_repo(self, repo_name, user_id, cache=False):
360 r = self.sa.query(Repository)\
359 r = self.sa.query(Repository)\
361 .filter(Repository.repo_name == repo_name).scalar()
360 .filter(Repository.repo_name == repo_name).scalar()
362
361
363 f = self.sa.query(UserFollowing)\
362 f = self.sa.query(UserFollowing)\
364 .filter(UserFollowing.follows_repository == r)\
363 .filter(UserFollowing.follows_repository == r)\
365 .filter(UserFollowing.user_id == user_id).scalar()
364 .filter(UserFollowing.user_id == user_id).scalar()
366
365
367 return f is not None
366 return f is not None
368
367
369 def is_following_user(self, username, user_id, cache=False):
368 def is_following_user(self, username, user_id, cache=False):
370 u = User.get_by_username(username)
369 u = User.get_by_username(username)
371
370
372 f = self.sa.query(UserFollowing)\
371 f = self.sa.query(UserFollowing)\
373 .filter(UserFollowing.follows_user == u)\
372 .filter(UserFollowing.follows_user == u)\
374 .filter(UserFollowing.user_id == user_id).scalar()
373 .filter(UserFollowing.user_id == user_id).scalar()
375
374
376 return f is not None
375 return f is not None
377
376
378 def get_followers(self, repo):
377 def get_followers(self, repo):
379 repo = self._get_repo(repo)
378 repo = self._get_repo(repo)
380
379
381 return self.sa.query(UserFollowing)\
380 return self.sa.query(UserFollowing)\
382 .filter(UserFollowing.follows_repository == repo).count()
381 .filter(UserFollowing.follows_repository == repo).count()
383
382
384 def get_forks(self, repo):
383 def get_forks(self, repo):
385 repo = self._get_repo(repo)
384 repo = self._get_repo(repo)
386 return self.sa.query(Repository)\
385 return self.sa.query(Repository)\
387 .filter(Repository.fork == repo).count()
386 .filter(Repository.fork == repo).count()
388
387
389 def get_pull_requests(self, repo):
388 def get_pull_requests(self, repo):
390 repo = self._get_repo(repo)
389 repo = self._get_repo(repo)
391 return self.sa.query(PullRequest)\
390 return self.sa.query(PullRequest)\
392 .filter(PullRequest.target_repo == repo)\
391 .filter(PullRequest.target_repo == repo)\
393 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
392 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
394
393
395 def get_artifacts(self, repo):
394 def get_artifacts(self, repo):
396 repo = self._get_repo(repo)
395 repo = self._get_repo(repo)
397 return self.sa.query(FileStore)\
396 return self.sa.query(FileStore)\
398 .filter(FileStore.repo == repo)\
397 .filter(FileStore.repo == repo)\
399 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
398 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
400
399
401 def mark_as_fork(self, repo, fork, user):
400 def mark_as_fork(self, repo, fork, user):
402 repo = self._get_repo(repo)
401 repo = self._get_repo(repo)
403 fork = self._get_repo(fork)
402 fork = self._get_repo(fork)
404 if fork and repo.repo_id == fork.repo_id:
403 if fork and repo.repo_id == fork.repo_id:
405 raise Exception("Cannot set repository as fork of itself")
404 raise Exception("Cannot set repository as fork of itself")
406
405
407 if fork and repo.repo_type != fork.repo_type:
406 if fork and repo.repo_type != fork.repo_type:
408 raise RepositoryError(
407 raise RepositoryError(
409 "Cannot set repository as fork of repository with other type")
408 "Cannot set repository as fork of repository with other type")
410
409
411 repo.fork = fork
410 repo.fork = fork
412 self.sa.add(repo)
411 self.sa.add(repo)
413 return repo
412 return repo
414
413
415 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
414 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
416 dbrepo = self._get_repo(repo)
415 dbrepo = self._get_repo(repo)
417 remote_uri = remote_uri or dbrepo.clone_uri
416 remote_uri = remote_uri or dbrepo.clone_uri
418 if not remote_uri:
417 if not remote_uri:
419 raise Exception("This repository doesn't have a clone uri")
418 raise Exception("This repository doesn't have a clone uri")
420
419
421 repo = dbrepo.scm_instance(cache=False)
420 repo = dbrepo.scm_instance(cache=False)
422 repo.config.clear_section('hooks')
421 repo.config.clear_section('hooks')
423
422
424 try:
423 try:
425 # NOTE(marcink): add extra validation so we skip invalid urls
424 # NOTE(marcink): add extra validation so we skip invalid urls
426 # this is because these tasks can be executed via the scheduler without
425 # this is because these tasks can be executed via the scheduler without
427 # proper validation of remote_uri
426 # proper validation of remote_uri
428 if validate_uri:
427 if validate_uri:
429 config = make_db_config(clear_session=False)
428 config = make_db_config(clear_session=False)
430 url_validator(remote_uri, dbrepo.repo_type, config)
429 url_validator(remote_uri, dbrepo.repo_type, config)
431 except InvalidCloneUrl:
430 except InvalidCloneUrl:
432 raise
431 raise
433
432
434 repo_name = dbrepo.repo_name
433 repo_name = dbrepo.repo_name
435 try:
434 try:
436 # TODO: we need to make sure those operations call proper hooks !
435 # TODO: we need to make sure those operations call proper hooks !
437 repo.fetch(remote_uri, **kwargs)
436 repo.fetch(remote_uri, **kwargs)
438
437
439 self.mark_for_invalidation(repo_name)
438 self.mark_for_invalidation(repo_name)
440 except Exception:
439 except Exception:
441 log.error(traceback.format_exc())
440 log.error(traceback.format_exc())
442 raise
441 raise
443
442
444 def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
443 def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
445 dbrepo = self._get_repo(repo)
444 dbrepo = self._get_repo(repo)
446 remote_uri = remote_uri or dbrepo.push_uri
445 remote_uri = remote_uri or dbrepo.push_uri
447 if not remote_uri:
446 if not remote_uri:
448 raise Exception("This repository doesn't have a clone uri")
447 raise Exception("This repository doesn't have a clone uri")
449
448
450 repo = dbrepo.scm_instance(cache=False)
449 repo = dbrepo.scm_instance(cache=False)
451 repo.config.clear_section('hooks')
450 repo.config.clear_section('hooks')
452
451
453 try:
452 try:
454 # NOTE(marcink): add extra validation so we skip invalid urls
453 # NOTE(marcink): add extra validation so we skip invalid urls
455 # this is because these tasks can be executed via the scheduler without
454 # this is because these tasks can be executed via the scheduler without
456 # proper validation of remote_uri
455 # proper validation of remote_uri
457 if validate_uri:
456 if validate_uri:
458 config = make_db_config(clear_session=False)
457 config = make_db_config(clear_session=False)
459 url_validator(remote_uri, dbrepo.repo_type, config)
458 url_validator(remote_uri, dbrepo.repo_type, config)
460 except InvalidCloneUrl:
459 except InvalidCloneUrl:
461 raise
460 raise
462
461
463 try:
462 try:
464 repo.push(remote_uri, **kwargs)
463 repo.push(remote_uri, **kwargs)
465 except Exception:
464 except Exception:
466 log.error(traceback.format_exc())
465 log.error(traceback.format_exc())
467 raise
466 raise
468
467
469 def commit_change(self, repo, repo_name, commit, user, author, message,
468 def commit_change(self, repo, repo_name, commit, user, author, message,
470 content: bytes, f_path: bytes, branch: str = None):
469 content: bytes, f_path: bytes, branch: str = None):
471 """
470 """
472 Commits changes
471 Commits changes
473 """
472 """
474 user = self._get_user(user)
473 user = self._get_user(user)
475
474
476 # message and author need to be unicode
475 # message and author need to be unicode
477 # the proper backend should then translate that into the required type
476 # the proper backend should then translate that into the required type
478 message = safe_str(message)
477 message = safe_str(message)
479 author = safe_str(author)
478 author = safe_str(author)
480 imc = repo.in_memory_commit
479 imc = repo.in_memory_commit
481 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
480 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
482 try:
481 try:
483 # TODO: handle pre-push action !
482 # TODO: handle pre-push action !
484 tip = imc.commit(
483 tip = imc.commit(
485 message=message, author=author, parents=[commit],
484 message=message, author=author, parents=[commit],
486 branch=branch or commit.branch)
485 branch=branch or commit.branch)
487 except Exception as e:
486 except Exception as e:
488 log.error(traceback.format_exc())
487 log.error(traceback.format_exc())
489 raise IMCCommitError(str(e))
488 raise IMCCommitError(str(e))
490 finally:
489 finally:
491 # always clear caches; if the commit fails we still want a fresh object
490 # always clear caches; if the commit fails we still want a fresh object
492 self.mark_for_invalidation(repo_name)
491 self.mark_for_invalidation(repo_name)
493
492
494 # We trigger the post-push action
493 # We trigger the post-push action
495 hooks_utils.trigger_post_push_hook(
494 hooks_utils.trigger_post_push_hook(
496 username=user.username, action='push_local', hook_type='post_push',
495 username=user.username, action='push_local', hook_type='post_push',
497 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
496 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
498 return tip
497 return tip
499
498
500 def _sanitize_path(self, f_path: bytes):
499 def _sanitize_path(self, f_path: bytes):
501 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
500 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
502 raise NonRelativePathError(b'%b is not a relative path' % f_path)
501 raise NonRelativePathError(b'%b is not a relative path' % f_path)
503 if f_path:
502 if f_path:
504 f_path = os.path.normpath(f_path)
503 f_path = os.path.normpath(f_path)
505 return f_path
504 return f_path
506
505
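To make the path checks above concrete, a small illustrative sketch; the byte paths are made-up examples, not values from the change:

model = ScmModel()
model._sanitize_path(b'docs/readme.rst')  # accepted, returned normalized
model._sanitize_path(b'/etc/passwd')      # raises NonRelativePathError (absolute path)
model._sanitize_path(b'./setup.py')       # raises NonRelativePathError ('./' prefix)
model._sanitize_path(b'a/../../escape')   # raises NonRelativePathError ('../' present)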
507 def get_dirnode_metadata(self, request, commit, dir_node):
506 def get_dirnode_metadata(self, request, commit, dir_node):
508 if not dir_node.is_dir():
507 if not dir_node.is_dir():
509 return []
508 return []
510
509
511 data = []
510 data = []
512 for node in dir_node:
511 for node in dir_node:
513 if not node.is_file():
512 if not node.is_file():
514 # we skip file-nodes
513 # we skip file-nodes
515 continue
514 continue
516
515
517 last_commit = node.last_commit
516 last_commit = node.last_commit
518 last_commit_date = last_commit.date
517 last_commit_date = last_commit.date
519 data.append({
518 data.append({
520 'name': node.name,
519 'name': node.name,
521 'size': h.format_byte_size_binary(node.size),
520 'size': h.format_byte_size_binary(node.size),
522 'modified_at': h.format_date(last_commit_date),
521 'modified_at': h.format_date(last_commit_date),
523 'modified_ts': last_commit_date.isoformat(),
522 'modified_ts': last_commit_date.isoformat(),
524 'revision': last_commit.revision,
523 'revision': last_commit.revision,
525 'short_id': last_commit.short_id,
524 'short_id': last_commit.short_id,
526 'message': h.escape(last_commit.message),
525 'message': h.escape(last_commit.message),
527 'author': h.escape(last_commit.author),
526 'author': h.escape(last_commit.author),
528 'user_profile': h.gravatar_with_user(
527 'user_profile': h.gravatar_with_user(
529 request, last_commit.author),
528 request, last_commit.author),
530 })
529 })
531
530
532 return data
531 return data
533
532
534 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
533 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
535 extended_info=False, content=False, max_file_bytes=None):
534 extended_info=False, content=False, max_file_bytes=None):
536 """
535 """
537 recursive walk in the root dir and return a set of all paths in that dir
536 recursive walk in the root dir and return a set of all paths in that dir
538 based on repository walk function
537 based on repository walk function
539
538
540 :param repo_name: name of repository
539 :param repo_name: name of repository
541 :param commit_id: commit id for which to list nodes
540 :param commit_id: commit id for which to list nodes
542 :param root_path: root path to list
541 :param root_path: root path to list
543 :param flat: return as a list, if False returns a dict with description
542 :param flat: return as a list, if False returns a dict with description
544 :param extended_info: show additional info such as md5, binary, size etc
543 :param extended_info: show additional info such as md5, binary, size etc
545 :param content: add nodes content to the return data
544 :param content: add nodes content to the return data
546 :param max_file_bytes: will not return file contents over this limit
545 :param max_file_bytes: will not return file contents over this limit
547
546
548 """
547 """
549 _files = list()
548 _files = list()
550 _dirs = list()
549 _dirs = list()
551
550
552 try:
551 try:
553 _repo = self._get_repo(repo_name)
552 _repo = self._get_repo(repo_name)
554 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
553 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
555 root_path = root_path.lstrip('/')
554 root_path = root_path.lstrip('/')
556
555
557 # get RootNode, inject pre-load options before walking
556 # get RootNode, inject pre-load options before walking
558 top_node = commit.get_node(root_path)
557 top_node = commit.get_node(root_path)
559 extended_info_pre_load = []
558 extended_info_pre_load = []
560 if extended_info:
559 if extended_info:
561 extended_info_pre_load += ['md5']
560 extended_info_pre_load += ['md5']
562 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
561 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
563
562
564 for __, dirs, files in commit.walk(top_node):
563 for __, dirs, files in commit.walk(top_node):
565
564
566 for f in files:
565 for f in files:
567 _content = None
566 _content = None
568 _data = f_name = f.str_path
567 _data = f_name = f.str_path
569
568
570 if not flat:
569 if not flat:
571 _data = {
570 _data = {
572 "name": h.escape(f_name),
571 "name": h.escape(f_name),
573 "type": "file",
572 "type": "file",
574 }
573 }
575 if extended_info:
574 if extended_info:
576 _data.update({
575 _data.update({
577 "md5": f.md5,
576 "md5": f.md5,
578 "binary": f.is_binary,
577 "binary": f.is_binary,
579 "size": f.size,
578 "size": f.size,
580 "extension": f.extension,
579 "extension": f.extension,
581 "mimetype": f.mimetype,
580 "mimetype": f.mimetype,
582 "lines": f.lines()[0]
581 "lines": f.lines()[0]
583 })
582 })
584
583
585 if content:
584 if content:
586 over_size_limit = (max_file_bytes is not None
585 over_size_limit = (max_file_bytes is not None
587 and f.size > max_file_bytes)
586 and f.size > max_file_bytes)
588 full_content = None
587 full_content = None
589 if not f.is_binary and not over_size_limit:
588 if not f.is_binary and not over_size_limit:
590 full_content = f.str_content
589 full_content = f.str_content
591
590
592 _data.update({
591 _data.update({
593 "content": full_content,
592 "content": full_content,
594 })
593 })
595 _files.append(_data)
594 _files.append(_data)
596
595
597 for d in dirs:
596 for d in dirs:
598 _data = d_name = d.str_path
597 _data = d_name = d.str_path
599 if not flat:
598 if not flat:
600 _data = {
599 _data = {
601 "name": h.escape(d_name),
600 "name": h.escape(d_name),
602 "type": "dir",
601 "type": "dir",
603 }
602 }
604 if extended_info:
603 if extended_info:
605 _data.update({
604 _data.update({
606 "md5": "",
605 "md5": "",
607 "binary": False,
606 "binary": False,
608 "size": 0,
607 "size": 0,
609 "extension": "",
608 "extension": "",
610 })
609 })
611 if content:
610 if content:
612 _data.update({
611 _data.update({
613 "content": None
612 "content": None
614 })
613 })
615 _dirs.append(_data)
614 _dirs.append(_data)
616 except RepositoryError:
615 except RepositoryError:
617 log.exception("Exception in get_nodes")
616 log.exception("Exception in get_nodes")
618 raise
617 raise
619
618
620 return _dirs, _files
619 return _dirs, _files
621
620
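A hedged usage sketch of get_nodes with the non-flat, extended output described in the docstring above; the repository name and commit id are placeholders:

dirs, files = ScmModel().get_nodes(
    'some/repo', commit_id='abcdef12', root_path='/',
    flat=False, extended_info=True, content=False,
    max_file_bytes=1024 * 1024)
# with flat=False each file entry is a dict such as:
# {'name': 'README.rst', 'type': 'file', 'md5': '...', 'binary': False,
#  'size': 1234, 'extension': 'rst', 'mimetype': 'text/x-rst', 'lines': 40}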
622 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
621 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
623 """
622 """
624 Generate files for quick filter in files view
623 Generate files for quick filter in files view
625 """
624 """
626
625
627 _files = list()
626 _files = list()
628 _dirs = list()
627 _dirs = list()
629 try:
628 try:
630 _repo = self._get_repo(repo_name)
629 _repo = self._get_repo(repo_name)
631 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
630 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
632 root_path = root_path.lstrip('/')
631 root_path = root_path.lstrip('/')
633
632
634 top_node = commit.get_node(root_path)
633 top_node = commit.get_node(root_path)
635 top_node.default_pre_load = []
634 top_node.default_pre_load = []
636
635
637 for __, dirs, files in commit.walk(top_node):
636 for __, dirs, files in commit.walk(top_node):
638 for f in files:
637 for f in files:
639
638
640 _data = {
639 _data = {
641 "name": h.escape(f.str_path),
640 "name": h.escape(f.str_path),
642 "type": "file",
641 "type": "file",
643 }
642 }
644
643
645 _files.append(_data)
644 _files.append(_data)
646
645
647 for d in dirs:
646 for d in dirs:
648
647
649 _data = {
648 _data = {
650 "name": h.escape(d.str_path),
649 "name": h.escape(d.str_path),
651 "type": "dir",
650 "type": "dir",
652 }
651 }
653
652
654 _dirs.append(_data)
653 _dirs.append(_data)
655 except RepositoryError:
654 except RepositoryError:
656 log.exception("Exception in get_quick_filter_nodes")
655 log.exception("Exception in get_quick_filter_nodes")
657 raise
656 raise
658
657
659 return _dirs, _files
658 return _dirs, _files
660
659
661 def get_node(self, repo_name, commit_id, file_path,
660 def get_node(self, repo_name, commit_id, file_path,
662 extended_info=False, content=False, max_file_bytes=None, cache=True):
661 extended_info=False, content=False, max_file_bytes=None, cache=True):
663 """
662 """
664 retrieve single node from commit
663 retrieve single node from commit
665 """
664 """
666
665
667 try:
666 try:
668
667
669 _repo = self._get_repo(repo_name)
668 _repo = self._get_repo(repo_name)
670 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
669 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
671
670
672 file_node = commit.get_node(file_path)
671 file_node = commit.get_node(file_path)
673 if file_node.is_dir():
672 if file_node.is_dir():
674 raise RepositoryError('The given path is a directory')
673 raise RepositoryError('The given path is a directory')
675
674
676 _content = None
675 _content = None
677 f_name = file_node.str_path
676 f_name = file_node.str_path
678
677
679 file_data = {
678 file_data = {
680 "name": h.escape(f_name),
679 "name": h.escape(f_name),
681 "type": "file",
680 "type": "file",
682 }
681 }
683
682
684 if extended_info:
683 if extended_info:
685 file_data.update({
684 file_data.update({
686 "extension": file_node.extension,
685 "extension": file_node.extension,
687 "mimetype": file_node.mimetype,
686 "mimetype": file_node.mimetype,
688 })
687 })
689
688
690 if cache:
689 if cache:
691 md5 = file_node.md5
690 md5 = file_node.md5
692 is_binary = file_node.is_binary
691 is_binary = file_node.is_binary
693 size = file_node.size
692 size = file_node.size
694 else:
693 else:
695 is_binary, md5, size, _content = file_node.metadata_uncached()
694 is_binary, md5, size, _content = file_node.metadata_uncached()
696
695
697 file_data.update({
696 file_data.update({
698 "md5": md5,
697 "md5": md5,
699 "binary": is_binary,
698 "binary": is_binary,
700 "size": size,
699 "size": size,
701 })
700 })
702
701
703 if content and cache:
702 if content and cache:
704 # get content + cache
703 # get content + cache
705 size = file_node.size
704 size = file_node.size
706 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
705 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
707 full_content = None
706 full_content = None
708 all_lines = 0
707 all_lines = 0
709 if not file_node.is_binary and not over_size_limit:
708 if not file_node.is_binary and not over_size_limit:
710 full_content = safe_str(file_node.content)
709 full_content = safe_str(file_node.content)
711 all_lines, empty_lines = file_node.count_lines(full_content)
710 all_lines, empty_lines = file_node.count_lines(full_content)
712
711
713 file_data.update({
712 file_data.update({
714 "content": full_content,
713 "content": full_content,
715 "lines": all_lines
714 "lines": all_lines
716 })
715 })
717 elif content:
716 elif content:
718 # get content *without* cache
717 # get content *without* cache
719 if _content is None:
718 if _content is None:
720 is_binary, md5, size, _content = file_node.metadata_uncached()
719 is_binary, md5, size, _content = file_node.metadata_uncached()
721
720
722 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
721 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
723 full_content = None
722 full_content = None
724 all_lines = 0
723 all_lines = 0
725 if not is_binary and not over_size_limit:
724 if not is_binary and not over_size_limit:
726 full_content = safe_str(_content)
725 full_content = safe_str(_content)
727 all_lines, empty_lines = file_node.count_lines(full_content)
726 all_lines, empty_lines = file_node.count_lines(full_content)
728
727
729 file_data.update({
728 file_data.update({
730 "content": full_content,
729 "content": full_content,
731 "lines": all_lines
730 "lines": all_lines
732 })
731 })
733
732
734 except RepositoryError:
733 except RepositoryError:
735 log.exception("Exception in get_node")
734 log.exception("Exception in get_node")
736 raise
735 raise
737
736
738 return file_data
737 return file_data
739
738
740 def get_fts_data(self, repo_name, commit_id, root_path='/'):
739 def get_fts_data(self, repo_name, commit_id, root_path='/'):
741 """
740 """
742 Fetch node tree for usage in full text search
741 Fetch node tree for usage in full text search
743 """
742 """
744
743
745 tree_info = list()
744 tree_info = list()
746
745
747 try:
746 try:
748 _repo = self._get_repo(repo_name)
747 _repo = self._get_repo(repo_name)
749 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
748 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
750 root_path = root_path.lstrip('/')
749 root_path = root_path.lstrip('/')
751 top_node = commit.get_node(root_path)
750 top_node = commit.get_node(root_path)
752 top_node.default_pre_load = []
751 top_node.default_pre_load = []
753
752
754 for __, dirs, files in commit.walk(top_node):
753 for __, dirs, files in commit.walk(top_node):
755
754
756 for f in files:
755 for f in files:
757 is_binary, md5, size, _content = f.metadata_uncached()
756 is_binary, md5, size, _content = f.metadata_uncached()
758 _data = {
757 _data = {
759 "name": f.str_path,
758 "name": f.str_path,
760 "md5": md5,
759 "md5": md5,
761 "extension": f.extension,
760 "extension": f.extension,
762 "binary": is_binary,
761 "binary": is_binary,
763 "size": size
762 "size": size
764 }
763 }
765
764
766 tree_info.append(_data)
765 tree_info.append(_data)
767
766
768 except RepositoryError:
767 except RepositoryError:
769 log.exception("Exception in get_nodes")
768 log.exception("Exception in get_nodes")
770 raise
769 raise
771
770
772 return tree_info
771 return tree_info
773
772
774 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
773 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
775 author=None, trigger_push_hook=True):
774 author=None, trigger_push_hook=True):
776 """
775 """
777 Commits given multiple nodes into repo
776 Commits given multiple nodes into repo
778
777
779 :param user: RhodeCode User object or user_id, the committer
778 :param user: RhodeCode User object or user_id, the committer
780 :param repo: RhodeCode Repository object
779 :param repo: RhodeCode Repository object
781 :param message: commit message
780 :param message: commit message
782 :param nodes: mapping {filename:{'content':content},...}
781 :param nodes: mapping {filename:{'content':content},...}
783 :param parent_commit: parent commit, can be empty, then it is the
782 :param parent_commit: parent commit, can be empty, then it is the
784 initial commit
783 initial commit
785 :param author: author of the commit, can be different than the committer,
784 :param author: author of the commit, can be different than the committer,
786 only for git
785 only for git
787 :param trigger_push_hook: trigger push hooks
786 :param trigger_push_hook: trigger push hooks
788
787
789 :returns: new committed commit
788 :returns: new committed commit
790 """
789 """
791 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
790 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
792 user, repo, message, author)
791 user, repo, message, author)
793
792
794 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
793 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
795
794
796 upload_file_types = (io.BytesIO, io.BufferedRandom)
795 upload_file_types = (io.BytesIO, io.BufferedRandom)
797 processed_nodes = []
796 processed_nodes = []
798 for filename, content_dict in nodes.items():
797 for filename, content_dict in nodes.items():
799 if not isinstance(filename, bytes):
798 if not isinstance(filename, bytes):
800 raise ValueError(f'filename key in nodes needs to be bytes, or {upload_file_types}')
799 raise ValueError(f'filename key in nodes needs to be bytes, or {upload_file_types}')
801 content = content_dict['content']
800 content = content_dict['content']
802 if not isinstance(content, upload_file_types + (bytes,)):
801 if not isinstance(content, upload_file_types + (bytes,)):
803 raise ValueError('content key value in nodes needs to be bytes')
802 raise ValueError('content key value in nodes needs to be bytes')
804
803
805 for f_path in nodes:
804 for f_path in nodes:
806 f_path = self._sanitize_path(f_path)
805 f_path = self._sanitize_path(f_path)
807 content = nodes[f_path]['content']
806 content = nodes[f_path]['content']
808
807
809 # decoding here will ensure that we have properly encoded values
808 # decoding here will ensure that we have properly encoded values
810 # in any other case this will throw exceptions and deny the commit
809 # in any other case this will throw exceptions and deny the commit
811
810
812 if isinstance(content, bytes):
811 if isinstance(content, bytes):
813 pass
812 pass
814 elif isinstance(content, upload_file_types):
813 elif isinstance(content, upload_file_types):
815 content = content.read()
814 content = content.read()
816 else:
815 else:
817 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
816 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
818 processed_nodes.append((f_path, content))
817 processed_nodes.append((f_path, content))
819
818
820 # add multiple nodes
819 # add multiple nodes
821 for path, content in processed_nodes:
820 for path, content in processed_nodes:
822 imc.add(FileNode(path, content=content))
821 imc.add(FileNode(path, content=content))
823
822
824 # TODO: handle pre push scenario
823 # TODO: handle pre push scenario
825 tip = imc.commit(message=message,
824 tip = imc.commit(message=message,
826 author=author,
825 author=author,
827 parents=parents,
826 parents=parents,
828 branch=parent_commit.branch)
827 branch=parent_commit.branch)
829
828
830 self.mark_for_invalidation(repo.repo_name)
829 self.mark_for_invalidation(repo.repo_name)
831 if trigger_push_hook:
830 if trigger_push_hook:
832 hooks_utils.trigger_post_push_hook(
831 hooks_utils.trigger_post_push_hook(
833 username=user.username, action='push_local',
832 username=user.username, action='push_local',
834 repo_name=repo.repo_name, repo_type=scm_instance.alias,
833 repo_name=repo.repo_name, repo_type=scm_instance.alias,
835 hook_type='post_push',
834 hook_type='post_push',
836 commit_ids=[tip.raw_id])
835 commit_ids=[tip.raw_id])
837 return tip
836 return tip
838
837
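As a rough sketch of the `nodes` mapping create_nodes expects (bytes filenames, bytes or file-like content); the user/repo objects and file names below are assumptions:

import io

nodes = {
    b'docs/index.rst': {'content': b'Welcome\n=======\n'},
    b'assets/logo.png': {'content': io.BytesIO(b'\x89PNG...')},  # file-like objects are read()
}
tip = ScmModel().create_nodes(
    user=current_user, repo=db_repo, message='add docs and logo',
    nodes=nodes, parent_commit=None, trigger_push_hook=True)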
839 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
838 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
840 author=None, trigger_push_hook=True):
839 author=None, trigger_push_hook=True):
841 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
840 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
842 user, repo, message, author)
841 user, repo, message, author)
843
842
844 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
843 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
845
844
846 # add multiple nodes
845 # add multiple nodes
847 for _filename, data in nodes.items():
846 for _filename, data in nodes.items():
848 # new filename, can be renamed from the old one, also sanitize
847 # new filename, can be renamed from the old one, also sanitize
849 # the path for any hack around relative paths like ../../ etc.
848 # the path for any hack around relative paths like ../../ etc.
850 filename = self._sanitize_path(data['filename'])
849 filename = self._sanitize_path(data['filename'])
851 old_filename = self._sanitize_path(_filename)
850 old_filename = self._sanitize_path(_filename)
852 content = data['content']
851 content = data['content']
853 file_mode = data.get('mode')
852 file_mode = data.get('mode')
854 filenode = FileNode(old_filename, content=content, mode=file_mode)
853 filenode = FileNode(old_filename, content=content, mode=file_mode)
855 op = data['op']
854 op = data['op']
856 if op == 'add':
855 if op == 'add':
857 imc.add(filenode)
856 imc.add(filenode)
858 elif op == 'del':
857 elif op == 'del':
859 imc.remove(filenode)
858 imc.remove(filenode)
860 elif op == 'mod':
859 elif op == 'mod':
861 if filename != old_filename:
860 if filename != old_filename:
862 # TODO: handle renames more efficiently, needs vcs lib changes
861 # TODO: handle renames more efficiently, needs vcs lib changes
863 imc.remove(filenode)
862 imc.remove(filenode)
864 imc.add(FileNode(filename, content=content, mode=file_mode))
863 imc.add(FileNode(filename, content=content, mode=file_mode))
865 else:
864 else:
866 imc.change(filenode)
865 imc.change(filenode)
867
866
868 try:
867 try:
869 # TODO: handle pre push scenario commit changes
868 # TODO: handle pre push scenario commit changes
870 tip = imc.commit(message=message,
869 tip = imc.commit(message=message,
871 author=author,
870 author=author,
872 parents=parents,
871 parents=parents,
873 branch=parent_commit.branch)
872 branch=parent_commit.branch)
874 except NodeNotChangedError:
873 except NodeNotChangedError:
875 raise
874 raise
876 except Exception as e:
875 except Exception as e:
877 log.exception("Unexpected exception during call to imc.commit")
876 log.exception("Unexpected exception during call to imc.commit")
878 raise IMCCommitError(str(e))
877 raise IMCCommitError(str(e))
879 finally:
878 finally:
880 # always clear caches; if the commit fails we still want a fresh object
879 # always clear caches; if the commit fails we still want a fresh object
881 self.mark_for_invalidation(repo.repo_name)
880 self.mark_for_invalidation(repo.repo_name)
882
881
883 if trigger_push_hook:
882 if trigger_push_hook:
884 hooks_utils.trigger_post_push_hook(
883 hooks_utils.trigger_post_push_hook(
885 username=user.username, action='push_local', hook_type='post_push',
884 username=user.username, action='push_local', hook_type='post_push',
886 repo_name=repo.repo_name, repo_type=scm_instance.alias,
885 repo_name=repo.repo_name, repo_type=scm_instance.alias,
887 commit_ids=[tip.raw_id])
886 commit_ids=[tip.raw_id])
888
887
889 return tip
888 return tip
890
889
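The op-based `nodes` mapping handled above, sketched with hypothetical file names; keys are the old filenames, and a rename under 'mod' becomes remove + add:

nodes = {
    b'README.rst': {'op': 'mod', 'filename': b'README.rst',
                    'content': b'updated text\n', 'mode': None},
    b'old/name.txt': {'op': 'mod', 'filename': b'new/name.txt',
                      'content': b'same content\n', 'mode': None},
    b'obsolete.cfg': {'op': 'del', 'filename': b'obsolete.cfg', 'content': b''},
}
tip = ScmModel().update_nodes(
    user=current_user, repo=db_repo, message='edit, rename and delete files',
    nodes=nodes)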
891 def update_binary_node(self, user, repo, message, node, parent_commit=None, author=None):
890 def update_binary_node(self, user, repo, message, node, parent_commit=None, author=None):
892 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
891 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
893 user, repo, message, author)
892 user, repo, message, author)
894
893
895 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
894 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
896
895
897 file_path = node.get('file_path')
896 file_path = node.get('file_path')
898 if isinstance(raw_content := node.get('content'), (io.BytesIO, io.BufferedRandom)):
897 if isinstance(raw_content := node.get('content'), (io.BytesIO, io.BufferedRandom)):
899 content = raw_content.read()
898 content = raw_content.read()
900 else:
899 else:
901 raise Exception("Wrong content was provided")
900 raise Exception("Wrong content was provided")
902 file_node = FileNode(file_path, content=content)
901 file_node = FileNode(file_path, content=content)
903 imc.change(file_node)
902 imc.change(file_node)
904
903
905 try:
904 try:
906 tip = imc.commit(message=message,
905 tip = imc.commit(message=message,
907 author=author,
906 author=author,
908 parents=parents,
907 parents=parents,
909 branch=parent_commit.branch)
908 branch=parent_commit.branch)
910 except NodeNotChangedError:
909 except NodeNotChangedError:
911 raise
910 raise
912 except Exception as e:
911 except Exception as e:
913 log.exception("Unexpected exception during call to imc.commit")
912 log.exception("Unexpected exception during call to imc.commit")
914 raise IMCCommitError(str(e))
913 raise IMCCommitError(str(e))
915 finally:
914 finally:
916 self.mark_for_invalidation(repo.repo_name)
915 self.mark_for_invalidation(repo.repo_name)
917
916
918 hooks_utils.trigger_post_push_hook(
917 hooks_utils.trigger_post_push_hook(
919 username=user.username, action='push_local', hook_type='post_push',
918 username=user.username, action='push_local', hook_type='post_push',
920 repo_name=repo.repo_name, repo_type=scm_instance.alias,
919 repo_name=repo.repo_name, repo_type=scm_instance.alias,
921 commit_ids=[tip.raw_id])
920 commit_ids=[tip.raw_id])
922 return tip
921 return tip
923
922
924 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
923 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
925 author=None, trigger_push_hook=True):
924 author=None, trigger_push_hook=True):
926 """
925 """
927 Deletes the given nodes from `repo`
926 Deletes the given nodes from `repo`
928
927
929 :param user: RhodeCode User object or user_id, the committer
928 :param user: RhodeCode User object or user_id, the committer
930 :param repo: RhodeCode Repository object
929 :param repo: RhodeCode Repository object
931 :param message: commit message
930 :param message: commit message
932 :param nodes: mapping {filename:{'content':content},...}
931 :param nodes: mapping {filename:{'content':content},...}
933 :param parent_commit: parent commit; can be empty, in which case it is the
932 :param parent_commit: parent commit; can be empty, in which case it is the
934 initial commit
933 initial commit
935 :param author: author of the commit; can differ from the committer
934 :param author: author of the commit; can differ from the committer
936 (git only)
935 (git only)
937 :param trigger_push_hook: trigger push hooks
936 :param trigger_push_hook: trigger push hooks
938
937
939 :returns: new commit after deletion
938 :returns: new commit after deletion
940 """
939 """
941
940
942 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
941 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
943 user, repo, message, author)
942 user, repo, message, author)
944
943
945 processed_nodes = []
944 processed_nodes = []
946 for f_path in nodes:
945 for f_path in nodes:
947 f_path = self._sanitize_path(f_path)
946 f_path = self._sanitize_path(f_path)
948 # content can be empty, but for compatibility the same dict
947 # content can be empty, but for compatibility the same dict
949 # structure as in add_nodes is accepted
948 # structure as in add_nodes is accepted
950 content = nodes[f_path].get('content')
949 content = nodes[f_path].get('content')
951 processed_nodes.append((safe_bytes(f_path), content))
950 processed_nodes.append((safe_bytes(f_path), content))
952
951
953 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
952 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
954
953
955 # remove multiple nodes
954 # remove multiple nodes
956 for path, content in processed_nodes:
955 for path, content in processed_nodes:
957 imc.remove(FileNode(path, content=content))
956 imc.remove(FileNode(path, content=content))
958
957
959 # TODO: handle pre push scenario
958 # TODO: handle pre push scenario
960 tip = imc.commit(message=message,
959 tip = imc.commit(message=message,
961 author=author,
960 author=author,
962 parents=parents,
961 parents=parents,
963 branch=parent_commit.branch)
962 branch=parent_commit.branch)
964
963
965 self.mark_for_invalidation(repo.repo_name)
964 self.mark_for_invalidation(repo.repo_name)
966 if trigger_push_hook:
965 if trigger_push_hook:
967 hooks_utils.trigger_post_push_hook(
966 hooks_utils.trigger_post_push_hook(
968 username=user.username, action='push_local', hook_type='post_push',
967 username=user.username, action='push_local', hook_type='post_push',
969 repo_name=repo.repo_name, repo_type=scm_instance.alias,
968 repo_name=repo.repo_name, repo_type=scm_instance.alias,
970 commit_ids=[tip.raw_id])
969 commit_ids=[tip.raw_id])
971 return tip
970 return tip
972
971
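A minimal usage sketch of the delete_nodes API documented above. It assumes the enclosing class is RhodeCode's ScmModel (rhodecode.model.scm) and that a user and repository already exist; every name in the snippet is illustrative, not taken from this changeset.

# hedged sketch, assuming ScmModel is importable from rhodecode.model.scm and
# that `db_user` / `db_repo` are existing User / Repository objects
from rhodecode.model.scm import ScmModel

nodes = {
    'docs/old_page.rst': {'content': ''},    # content may stay empty for deletes
    'src/legacy_helpers.py': {'content': ''},
}
tip = ScmModel().delete_nodes(
    user=db_user,
    repo=db_repo,
    message='Remove obsolete files',
    nodes=nodes,
    trigger_push_hook=True,  # fire the post_push hook after the commit
)
# `tip` is the new commit created by the deletion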
973 def strip(self, repo, commit_id, branch):
972 def strip(self, repo, commit_id, branch):
974 scm_instance = repo.scm_instance(cache=False)
973 scm_instance = repo.scm_instance(cache=False)
975 scm_instance.config.clear_section('hooks')
974 scm_instance.config.clear_section('hooks')
976 scm_instance.strip(commit_id, branch)
975 scm_instance.strip(commit_id, branch)
977 self.mark_for_invalidation(repo.repo_name)
976 self.mark_for_invalidation(repo.repo_name)
978
977
979 def get_unread_journal(self):
978 def get_unread_journal(self):
980 return self.sa.query(UserLog).count()
979 return self.sa.query(UserLog).count()
981
980
982 @classmethod
981 @classmethod
983 def backend_landing_ref(cls, repo_type):
982 def backend_landing_ref(cls, repo_type):
984 """
983 """
985 Return a default landing ref based on a repository type.
984 Return a default landing ref based on a repository type.
986 """
985 """
987
986
988 landing_ref = {
987 landing_ref = {
989 'hg': ('branch:default', 'default'),
988 'hg': ('branch:default', 'default'),
990 'git': ('branch:master', 'master'),
989 'git': ('branch:master', 'master'),
991 'svn': ('rev:tip', 'latest tip'),
990 'svn': ('rev:tip', 'latest tip'),
992 'default': ('rev:tip', 'latest tip'),
991 'default': ('rev:tip', 'latest tip'),
993 }
992 }
994
993
995 return landing_ref.get(repo_type) or landing_ref['default']
994 return landing_ref.get(repo_type) or landing_ref['default']
996
995
997 def get_repo_landing_revs(self, translator, repo=None):
996 def get_repo_landing_revs(self, translator, repo=None):
998 """
997 """
999 Generates select options with tags, branches and bookmarks (for hg only),
998 Generates select options with tags, branches and bookmarks (for hg only),
1000 grouped by type
999 grouped by type
1001
1000
1002 :param repo:
1001 :param repo:
1003 """
1002 """
1004 from rhodecode.lib.vcs.backends.git import GitRepository
1003 from rhodecode.lib.vcs.backends.git import GitRepository
1005
1004
1006 _ = translator
1005 _ = translator
1007 repo = self._get_repo(repo)
1006 repo = self._get_repo(repo)
1008
1007
1009 if repo:
1008 if repo:
1010 repo_type = repo.repo_type
1009 repo_type = repo.repo_type
1011 else:
1010 else:
1012 repo_type = 'default'
1011 repo_type = 'default'
1013
1012
1014 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
1013 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
1015
1014
1016 default_ref_options = [
1015 default_ref_options = [
1017 [default_landing_ref, landing_ref_lbl]
1016 [default_landing_ref, landing_ref_lbl]
1018 ]
1017 ]
1019 default_choices = [
1018 default_choices = [
1020 default_landing_ref
1019 default_landing_ref
1021 ]
1020 ]
1022
1021
1023 if not repo:
1022 if not repo:
1024 # presented at NEW repo creation
1023 # presented at NEW repo creation
1025 return default_choices, default_ref_options
1024 return default_choices, default_ref_options
1026
1025
1027 repo = repo.scm_instance()
1026 repo = repo.scm_instance()
1028
1027
1029 ref_options = [(default_landing_ref, landing_ref_lbl)]
1028 ref_options = [(default_landing_ref, landing_ref_lbl)]
1030 choices = [default_landing_ref]
1029 choices = [default_landing_ref]
1031
1030
1032 # branches
1031 # branches
1033 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1032 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1034 if not branch_group:
1033 if not branch_group:
1035 # new repo, or maybe a repo without any branch?
1034 # new repo, or maybe a repo without any branch?
1036 branch_group = default_ref_options
1035 branch_group = default_ref_options
1037
1036
1038 branches_group = (branch_group, _("Branches"))
1037 branches_group = (branch_group, _("Branches"))
1039 ref_options.append(branches_group)
1038 ref_options.append(branches_group)
1040 choices.extend([x[0] for x in branches_group[0]])
1039 choices.extend([x[0] for x in branches_group[0]])
1041
1040
1042 # bookmarks for HG
1041 # bookmarks for HG
1043 if repo.alias == 'hg':
1042 if repo.alias == 'hg':
1044 bookmarks_group = (
1043 bookmarks_group = (
1045 [(f'book:{safe_str(b)}', safe_str(b))
1044 [(f'book:{safe_str(b)}', safe_str(b))
1046 for b in repo.bookmarks],
1045 for b in repo.bookmarks],
1047 _("Bookmarks"))
1046 _("Bookmarks"))
1048 ref_options.append(bookmarks_group)
1047 ref_options.append(bookmarks_group)
1049 choices.extend([x[0] for x in bookmarks_group[0]])
1048 choices.extend([x[0] for x in bookmarks_group[0]])
1050
1049
1051 # tags
1050 # tags
1052 tags_group = (
1051 tags_group = (
1053 [(f'tag:{safe_str(t)}', safe_str(t))
1052 [(f'tag:{safe_str(t)}', safe_str(t))
1054 for t in repo.tags],
1053 for t in repo.tags],
1055 _("Tags"))
1054 _("Tags"))
1056 ref_options.append(tags_group)
1055 ref_options.append(tags_group)
1057 choices.extend([x[0] for x in tags_group[0]])
1056 choices.extend([x[0] for x in tags_group[0]])
1058
1057
1059 return choices, ref_options
1058 return choices, ref_options
1060
1059
1061 def get_server_info(self, environ=None):
1060 def get_server_info(self, environ=None):
1062 server_info = get_system_info(environ)
1061 server_info = get_system_info(environ)
1063 return server_info
1062 return server_info
@@ -1,744 +1,748 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 """
20 """
22 Package for testing various lib/helper functions in rhodecode
21 Package for testing various lib/helper functions in rhodecode
23 """
22 """
24
23
25 import datetime
24 import datetime
26 import string
25 import string
27 import mock
26 import mock
28 import pytest
27 import pytest
29 import functools
28 import functools
29 import time
30
30
31 from rhodecode.tests import no_newline_id_generator
31 from rhodecode.tests import no_newline_id_generator
32 from rhodecode.tests.utils import run_test_concurrently
32 from rhodecode.tests.utils import run_test_concurrently
33
33
34 from rhodecode.lib import rc_cache
34 from rhodecode.lib import rc_cache
35 from rhodecode.lib.helpers import InitialsGravatar
35 from rhodecode.lib.helpers import InitialsGravatar
36 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.utils2 import AttributeDict
37
37
38 from rhodecode.model.db import Repository, CacheKey
38 from rhodecode.model.db import Repository, CacheKey
39
39
40
40
41 TEST_URLS = [
41 TEST_URLS = [
42 ('127.0.0.1', '127.0.0.1'),
42 ('127.0.0.1', '127.0.0.1'),
43 ('marcink@127.0.0.1', '127.0.0.1'),
43 ('marcink@127.0.0.1', '127.0.0.1'),
44 ('marcink:pass@127.0.0.1', '127.0.0.1'),
44 ('marcink:pass@127.0.0.1', '127.0.0.1'),
45 ('marcink@domain.name:pass@127.0.0.1', '127.0.0.1'),
45 ('marcink@domain.name:pass@127.0.0.1', '127.0.0.1'),
46
46
47 ('127.0.0.1:8080', '127.0.0.1:8080'),
47 ('127.0.0.1:8080', '127.0.0.1:8080'),
48 ('marcink@127.0.0.1:8080', '127.0.0.1:8080'),
48 ('marcink@127.0.0.1:8080', '127.0.0.1:8080'),
49 ('marcink:pass@127.0.0.1:8080', '127.0.0.1:8080'),
49 ('marcink:pass@127.0.0.1:8080', '127.0.0.1:8080'),
50 ('marcink@domain.name:pass@127.0.0.1:8080', '127.0.0.1:8080'),
50 ('marcink@domain.name:pass@127.0.0.1:8080', '127.0.0.1:8080'),
51
51
52 ('domain.org', 'domain.org'),
52 ('domain.org', 'domain.org'),
53 ('user:pass@domain.org:8080', 'domain.org:8080'),
53 ('user:pass@domain.org:8080', 'domain.org:8080'),
54 ('user@domain.org:pass@domain.org:8080', 'domain.org:8080'),
54 ('user@domain.org:pass@domain.org:8080', 'domain.org:8080'),
55 ]
55 ]
56
56
57
57
58 @pytest.mark.parametrize("protocol", ['http://', 'https://'])
58 @pytest.mark.parametrize("protocol", ['http://', 'https://'])
59 @pytest.mark.parametrize("test_url, expected", TEST_URLS)
59 @pytest.mark.parametrize("test_url, expected", TEST_URLS)
60 def test_credentials_filter(protocol, test_url, expected):
60 def test_credentials_filter(protocol, test_url, expected):
61 from rhodecode.lib.utils2 import credentials_filter
61 from rhodecode.lib.utils2 import credentials_filter
62 test_url = protocol + test_url
62 test_url = protocol + test_url
63 assert credentials_filter(test_url) == protocol + expected
63 assert credentials_filter(test_url) == protocol + expected
64
64
65
65
66 @pytest.mark.parametrize("str_bool, expected", [
66 @pytest.mark.parametrize("str_bool, expected", [
67 ('t', True),
67 ('t', True),
68 ('true', True),
68 ('true', True),
69 ('y', True),
69 ('y', True),
70 ('yes', True),
70 ('yes', True),
71 ('on', True),
71 ('on', True),
72 ('1', True),
72 ('1', True),
73 ('Y', True),
73 ('Y', True),
74 ('yeS', True),
74 ('yeS', True),
75 ('Y', True),
75 ('Y', True),
76 ('TRUE', True),
76 ('TRUE', True),
77 ('T', True),
77 ('T', True),
78 ('False', False),
78 ('False', False),
79 ('F', False),
79 ('F', False),
80 ('FALSE', False),
80 ('FALSE', False),
81 ('0', False),
81 ('0', False),
82 ('-1', False),
82 ('-1', False),
83 ('', False)
83 ('', False)
84 ])
84 ])
85 def test_str2bool(str_bool, expected):
85 def test_str2bool(str_bool, expected):
86 from rhodecode.lib.utils2 import str2bool
86 from rhodecode.lib.utils2 import str2bool
87 assert str2bool(str_bool) == expected
87 assert str2bool(str_bool) == expected
88
88
89
89
90 @pytest.mark.parametrize("text, expected", functools.reduce(lambda a1, a2: a1+a2, [
90 @pytest.mark.parametrize("text, expected", functools.reduce(lambda a1, a2: a1+a2, [
91 [
91 [
92 (pref+"", []),
92 (pref+"", []),
93 (pref+"Hi there @marcink", ['marcink']),
93 (pref+"Hi there @marcink", ['marcink']),
94 (pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
94 (pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
95 (pref+"Hi there @marcink\n", ['marcink']),
95 (pref+"Hi there @marcink\n", ['marcink']),
96 (pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
96 (pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
97 (pref+"Hi there marcin@rhodecode.com", []),
97 (pref+"Hi there marcin@rhodecode.com", []),
98 (pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
98 (pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
99 (pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
99 (pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
100 (pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
100 (pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
101 (pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
101 (pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
102 (pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
102 (pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
103 (pref+"@john @mary, please review", ["john", "mary"]),
103 (pref+"@john @mary, please review", ["john", "mary"]),
104 (pref+"@john,@mary, please review", ["john", "mary"]),
104 (pref+"@john,@mary, please review", ["john", "mary"]),
105 (pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
105 (pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
106 (pref+"@first hi there @marcink here's my email marcin@email.com "
106 (pref+"@first hi there @marcink here's my email marcin@email.com "
107 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
107 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
108 (pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
108 (pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
109 (pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
109 (pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
110 (pref+"user.dot hej ! not-needed maril@domain.org", []),
110 (pref+"user.dot hej ! not-needed maril@domain.org", []),
111 (pref+"\n@marcin", ['marcin']),
111 (pref+"\n@marcin", ['marcin']),
112 ]
112 ]
113 for pref in ['', '\n', 'hi !', '\t', '\n\n']]), ids=no_newline_id_generator)
113 for pref in ['', '\n', 'hi !', '\t', '\n\n']]), ids=no_newline_id_generator)
114 def test_mention_extractor(text, expected):
114 def test_mention_extractor(text, expected):
115 from rhodecode.lib.utils2 import extract_mentioned_users
115 from rhodecode.lib.utils2 import extract_mentioned_users
116 got = extract_mentioned_users(text)
116 got = extract_mentioned_users(text)
117 assert sorted(got, key=lambda x: x.lower()) == got
117 assert sorted(got, key=lambda x: x.lower()) == got
118 assert set(expected) == set(got)
118 assert set(expected) == set(got)
119
119
120 @pytest.mark.parametrize("age_args, expected, kw", [
120 @pytest.mark.parametrize("age_args, expected, kw", [
121 ({}, u'just now', {}),
121 ({}, u'just now', {}),
122 ({'seconds': -1}, u'1 second ago', {}),
122 ({'seconds': -1}, u'1 second ago', {}),
123 ({'seconds': -60 * 2}, u'2 minutes ago', {}),
123 ({'seconds': -60 * 2}, u'2 minutes ago', {}),
124 ({'hours': -1}, u'1 hour ago', {}),
124 ({'hours': -1}, u'1 hour ago', {}),
125 ({'hours': -24}, u'1 day ago', {}),
125 ({'hours': -24}, u'1 day ago', {}),
126 ({'hours': -24 * 5}, u'5 days ago', {}),
126 ({'hours': -24 * 5}, u'5 days ago', {}),
127 ({'months': -1}, u'1 month ago', {}),
127 ({'months': -1}, u'1 month ago', {}),
128 ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
128 ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
129 ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
129 ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
130 ({}, u'just now', {'short_format': True}),
130 ({}, u'just now', {'short_format': True}),
131 ({'seconds': -1}, u'1sec ago', {'short_format': True}),
131 ({'seconds': -1}, u'1sec ago', {'short_format': True}),
132 ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
132 ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
133 ({'hours': -1}, u'1h ago', {'short_format': True}),
133 ({'hours': -1}, u'1h ago', {'short_format': True}),
134 ({'hours': -24}, u'1d ago', {'short_format': True}),
134 ({'hours': -24}, u'1d ago', {'short_format': True}),
135 ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
135 ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
136 ({'months': -1}, u'1m ago', {'short_format': True}),
136 ({'months': -1}, u'1m ago', {'short_format': True}),
137 ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
137 ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
138 ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
138 ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
139 ])
139 ])
140 def test_age(age_args, expected, kw, baseapp):
140 def test_age(age_args, expected, kw, baseapp):
141 from rhodecode.lib.utils2 import age
141 from rhodecode.lib.utils2 import age
142 from dateutil import relativedelta
142 from dateutil import relativedelta
143 n = datetime.datetime(year=2012, month=5, day=17)
143 n = datetime.datetime(year=2012, month=5, day=17)
144 def delt(*args, **kwargs):
144 def delt(*args, **kwargs):
145 return relativedelta.relativedelta(*args, **kwargs)
145 return relativedelta.relativedelta(*args, **kwargs)
146
146
147 def translate(elem):
147 def translate(elem):
148 return elem.interpolate()
148 return elem.interpolate()
149
149
150 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
150 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
151
151
152
152
153 @pytest.mark.parametrize("age_args, expected, kw", [
153 @pytest.mark.parametrize("age_args, expected, kw", [
154 ({}, u'just now', {}),
154 ({}, u'just now', {}),
155 ({'seconds': 1}, u'in 1 second', {}),
155 ({'seconds': 1}, u'in 1 second', {}),
156 ({'seconds': 60 * 2}, u'in 2 minutes', {}),
156 ({'seconds': 60 * 2}, u'in 2 minutes', {}),
157 ({'hours': 1}, u'in 1 hour', {}),
157 ({'hours': 1}, u'in 1 hour', {}),
158 ({'hours': 24}, u'in 1 day', {}),
158 ({'hours': 24}, u'in 1 day', {}),
159 ({'hours': 24 * 5}, u'in 5 days', {}),
159 ({'hours': 24 * 5}, u'in 5 days', {}),
160 ({'months': 1}, u'in 1 month', {}),
160 ({'months': 1}, u'in 1 month', {}),
161 ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
161 ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
162 ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
162 ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
163 ({}, u'just now', {'short_format': True}),
163 ({}, u'just now', {'short_format': True}),
164 ({'seconds': 1}, u'in 1sec', {'short_format': True}),
164 ({'seconds': 1}, u'in 1sec', {'short_format': True}),
165 ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
165 ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
166 ({'hours': 1}, u'in 1h', {'short_format': True}),
166 ({'hours': 1}, u'in 1h', {'short_format': True}),
167 ({'hours': 24}, u'in 1d', {'short_format': True}),
167 ({'hours': 24}, u'in 1d', {'short_format': True}),
168 ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
168 ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
169 ({'months': 1}, u'in 1m', {'short_format': True}),
169 ({'months': 1}, u'in 1m', {'short_format': True}),
170 ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
170 ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
171 ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
171 ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
172 ])
172 ])
173 def test_age_in_future(age_args, expected, kw, baseapp):
173 def test_age_in_future(age_args, expected, kw, baseapp):
174 from rhodecode.lib.utils2 import age
174 from rhodecode.lib.utils2 import age
175 from dateutil import relativedelta
175 from dateutil import relativedelta
176 n = datetime.datetime(year=2012, month=5, day=17)
176 n = datetime.datetime(year=2012, month=5, day=17)
177 def delt(*args, **kwargs):
177 def delt(*args, **kwargs):
178 return relativedelta.relativedelta(*args, **kwargs)
178 return relativedelta.relativedelta(*args, **kwargs)
179
179
180 def translate(elem):
180 def translate(elem):
181 return elem.interpolate()
181 return elem.interpolate()
182
182
183 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
183 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
184
184
185
185
186 @pytest.mark.parametrize("sample, expected_tags", [
186 @pytest.mark.parametrize("sample, expected_tags", [
187 # entry
187 # entry
188 ((
188 ((
189 ""
189 ""
190 ),
190 ),
191 [
191 [
192
192
193 ]),
193 ]),
194 # entry
194 # entry
195 ((
195 ((
196 "hello world [stale]"
196 "hello world [stale]"
197 ),
197 ),
198 [
198 [
199 ('state', '[stale]'),
199 ('state', '[stale]'),
200 ]),
200 ]),
201 # entry
201 # entry
202 ((
202 ((
203 "hello world [v2.0.0] [v1.0.0]"
203 "hello world [v2.0.0] [v1.0.0]"
204 ),
204 ),
205 [
205 [
206 ('generic', '[v2.0.0]'),
206 ('generic', '[v2.0.0]'),
207 ('generic', '[v1.0.0]'),
207 ('generic', '[v1.0.0]'),
208 ]),
208 ]),
209 # entry
209 # entry
210 ((
210 ((
211 "he[ll]o wo[rl]d"
211 "he[ll]o wo[rl]d"
212 ),
212 ),
213 [
213 [
214 ('label', '[ll]'),
214 ('label', '[ll]'),
215 ('label', '[rl]'),
215 ('label', '[rl]'),
216 ]),
216 ]),
217 # entry
217 # entry
218 ((
218 ((
219 "hello world [stale]\n[featured]\n[stale] [dead] [dev]"
219 "hello world [stale]\n[featured]\n[stale] [dead] [dev]"
220 ),
220 ),
221 [
221 [
222 ('state', '[stale]'),
222 ('state', '[stale]'),
223 ('state', '[featured]'),
223 ('state', '[featured]'),
224 ('state', '[stale]'),
224 ('state', '[stale]'),
225 ('state', '[dead]'),
225 ('state', '[dead]'),
226 ('state', '[dev]'),
226 ('state', '[dev]'),
227 ]),
227 ]),
228 # entry
228 # entry
229 ((
229 ((
230 "hello world \n\n [stale] \n [url =&gt; [name](http://rc.com)]"
230 "hello world \n\n [stale] \n [url =&gt; [name](http://rc.com)]"
231 ),
231 ),
232 [
232 [
233 ('state', '[stale]'),
233 ('state', '[stale]'),
234 ('url', '[url =&gt; [name](http://rc.com)]'),
234 ('url', '[url =&gt; [name](http://rc.com)]'),
235 ]),
235 ]),
236 # entry
236 # entry
237 ((
237 ((
238 "[url =&gt; [linkNameJS](javascript:alert(document.domain))]\n"
238 "[url =&gt; [linkNameJS](javascript:alert(document.domain))]\n"
239 "[url =&gt; [linkNameHTTP](http://rhodecode.com)]\n"
239 "[url =&gt; [linkNameHTTP](http://rhodecode.com)]\n"
240 "[url =&gt; [linkNameHTTPS](https://rhodecode.com)]\n"
240 "[url =&gt; [linkNameHTTPS](https://rhodecode.com)]\n"
241 "[url =&gt; [linkNamePath](/repo_group)]\n"
241 "[url =&gt; [linkNamePath](/repo_group)]\n"
242 ),
242 ),
243 [
243 [
244 ('generic', '[linkNameJS]'),
244 ('generic', '[linkNameJS]'),
245 ('url', '[url =&gt; [linkNameHTTP](http://rhodecode.com)]'),
245 ('url', '[url =&gt; [linkNameHTTP](http://rhodecode.com)]'),
246 ('url', '[url =&gt; [linkNameHTTPS](https://rhodecode.com)]'),
246 ('url', '[url =&gt; [linkNameHTTPS](https://rhodecode.com)]'),
247 ('url', '[url =&gt; [linkNamePath](/repo_group)]'),
247 ('url', '[url =&gt; [linkNamePath](/repo_group)]'),
248 ]),
248 ]),
249 # entry
249 # entry
250 ((
250 ((
251 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =&gt;>< sa]"
251 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =&gt;>< sa]"
252 "[requires] [stale] [see<>=&gt;] [see =&gt; http://url.com]"
252 "[requires] [stale] [see<>=&gt;] [see =&gt; http://url.com]"
253 "[requires =&gt; url] [lang =&gt; python] [just a tag] "
253 "[requires =&gt; url] [lang =&gt; python] [just a tag] "
254 "<html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
254 "<html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
255 "[,d] [ =&gt; ULR ] [obsolete] [desc]]"
255 "[,d] [ =&gt; ULR ] [obsolete] [desc]]"
256 ),
256 ),
257 [
257 [
258 ('label', '[desc]'),
258 ('label', '[desc]'),
259 ('label', '[obsolete]'),
259 ('label', '[obsolete]'),
260 ('label', '[or]'),
260 ('label', '[or]'),
261 ('label', '[requires]'),
261 ('label', '[requires]'),
262 ('label', '[tag]'),
262 ('label', '[tag]'),
263 ('state', '[stale]'),
263 ('state', '[stale]'),
264 ('lang', '[lang =&gt; python]'),
264 ('lang', '[lang =&gt; python]'),
265 ('ref', '[requires =&gt; url]'),
265 ('ref', '[requires =&gt; url]'),
266 ('see', '[see =&gt; http://url.com]'),
266 ('see', '[see =&gt; http://url.com]'),
267
267
268 ]),
268 ]),
269
269
270 ], ids=no_newline_id_generator)
270 ], ids=no_newline_id_generator)
271 def test_metatag_extraction(sample, expected_tags):
271 def test_metatag_extraction(sample, expected_tags):
272 from rhodecode.lib.helpers import extract_metatags
272 from rhodecode.lib.helpers import extract_metatags
273 tags, value = extract_metatags(sample)
273 tags, value = extract_metatags(sample)
274 assert sorted(tags) == sorted(expected_tags)
274 assert sorted(tags) == sorted(expected_tags)
275
275
276
276
277 @pytest.mark.parametrize("tag_data, expected_html", [
277 @pytest.mark.parametrize("tag_data, expected_html", [
278
278
279 (('state', '[stable]'), '<div class="metatag" tag="state stable">stable</div>'),
279 (('state', '[stable]'), '<div class="metatag" tag="state stable">stable</div>'),
280 (('state', '[stale]'), '<div class="metatag" tag="state stale">stale</div>'),
280 (('state', '[stale]'), '<div class="metatag" tag="state stale">stale</div>'),
281 (('state', '[featured]'), '<div class="metatag" tag="state featured">featured</div>'),
281 (('state', '[featured]'), '<div class="metatag" tag="state featured">featured</div>'),
282 (('state', '[dev]'), '<div class="metatag" tag="state dev">dev</div>'),
282 (('state', '[dev]'), '<div class="metatag" tag="state dev">dev</div>'),
283 (('state', '[dead]'), '<div class="metatag" tag="state dead">dead</div>'),
283 (('state', '[dead]'), '<div class="metatag" tag="state dead">dead</div>'),
284
284
285 (('label', '[personal]'), '<div class="metatag" tag="label">personal</div>'),
285 (('label', '[personal]'), '<div class="metatag" tag="label">personal</div>'),
286 (('generic', '[v2.0.0]'), '<div class="metatag" tag="generic">v2.0.0</div>'),
286 (('generic', '[v2.0.0]'), '<div class="metatag" tag="generic">v2.0.0</div>'),
287
287
288 (('lang', '[lang =&gt; JavaScript]'), '<div class="metatag" tag="lang">JavaScript</div>'),
288 (('lang', '[lang =&gt; JavaScript]'), '<div class="metatag" tag="lang">JavaScript</div>'),
289 (('lang', '[lang =&gt; C++]'), '<div class="metatag" tag="lang">C++</div>'),
289 (('lang', '[lang =&gt; C++]'), '<div class="metatag" tag="lang">C++</div>'),
290 (('lang', '[lang =&gt; C#]'), '<div class="metatag" tag="lang">C#</div>'),
290 (('lang', '[lang =&gt; C#]'), '<div class="metatag" tag="lang">C#</div>'),
291 (('lang', '[lang =&gt; Delphi/Object]'), '<div class="metatag" tag="lang">Delphi/Object</div>'),
291 (('lang', '[lang =&gt; Delphi/Object]'), '<div class="metatag" tag="lang">Delphi/Object</div>'),
292 (('lang', '[lang =&gt; Objective-C]'), '<div class="metatag" tag="lang">Objective-C</div>'),
292 (('lang', '[lang =&gt; Objective-C]'), '<div class="metatag" tag="lang">Objective-C</div>'),
293 (('lang', '[lang =&gt; .NET]'), '<div class="metatag" tag="lang">.NET</div>'),
293 (('lang', '[lang =&gt; .NET]'), '<div class="metatag" tag="lang">.NET</div>'),
294
294
295 (('license', '[license =&gt; BSD 3-clause]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/BSD 3-clause">BSD 3-clause</a></div>'),
295 (('license', '[license =&gt; BSD 3-clause]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/BSD 3-clause">BSD 3-clause</a></div>'),
296 (('license', '[license =&gt; GPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/GPLv3">GPLv3</a></div>'),
296 (('license', '[license =&gt; GPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/GPLv3">GPLv3</a></div>'),
297 (('license', '[license =&gt; MIT]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/MIT">MIT</a></div>'),
297 (('license', '[license =&gt; MIT]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/MIT">MIT</a></div>'),
298 (('license', '[license =&gt; AGPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/AGPLv3">AGPLv3</a></div>'),
298 (('license', '[license =&gt; AGPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/AGPLv3">AGPLv3</a></div>'),
299
299
300 (('ref', '[requires =&gt; RepoName]'), '<div class="metatag" tag="ref requires">requires: <a href="/RepoName">RepoName</a></div>'),
300 (('ref', '[requires =&gt; RepoName]'), '<div class="metatag" tag="ref requires">requires: <a href="/RepoName">RepoName</a></div>'),
301 (('ref', '[recommends =&gt; GroupName]'), '<div class="metatag" tag="ref recommends">recommends: <a href="/GroupName">GroupName</a></div>'),
301 (('ref', '[recommends =&gt; GroupName]'), '<div class="metatag" tag="ref recommends">recommends: <a href="/GroupName">GroupName</a></div>'),
302 (('ref', '[conflicts =&gt; SomeName]'), '<div class="metatag" tag="ref conflicts">conflicts: <a href="/SomeName">SomeName</a></div>'),
302 (('ref', '[conflicts =&gt; SomeName]'), '<div class="metatag" tag="ref conflicts">conflicts: <a href="/SomeName">SomeName</a></div>'),
303 (('ref', '[base =&gt; SomeName]'), '<div class="metatag" tag="ref base">base: <a href="/SomeName">SomeName</a></div>'),
303 (('ref', '[base =&gt; SomeName]'), '<div class="metatag" tag="ref base">base: <a href="/SomeName">SomeName</a></div>'),
304
304
305 (('see', '[see =&gt; http://rhodecode.com]'), '<div class="metatag" tag="see">see: http://rhodecode.com </div>'),
305 (('see', '[see =&gt; http://rhodecode.com]'), '<div class="metatag" tag="see">see: http://rhodecode.com </div>'),
306
306
307 (('url', '[url =&gt; [linkName](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">linkName</a> </div>'),
307 (('url', '[url =&gt; [linkName](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">linkName</a> </div>'),
308 (('url', '[url =&gt; [example link](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">example link</a> </div>'),
308 (('url', '[url =&gt; [example link](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">example link</a> </div>'),
309 (('url', '[url =&gt; [v1.0.0](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">v1.0.0</a> </div>'),
309 (('url', '[url =&gt; [v1.0.0](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">v1.0.0</a> </div>'),
310
310
311 ])
311 ])
312 def test_metatags_stylize(tag_data, expected_html):
312 def test_metatags_stylize(tag_data, expected_html):
313 from rhodecode.lib.helpers import style_metatag
313 from rhodecode.lib.helpers import style_metatag
314 tag_type,value = tag_data
314 tag_type,value = tag_data
315 assert style_metatag(tag_type, value) == expected_html
315 assert style_metatag(tag_type, value) == expected_html
316
316
317
317
318 @pytest.mark.parametrize("tmpl_url, email, expected", [
318 @pytest.mark.parametrize("tmpl_url, email, expected", [
319 ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),
319 ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),
320
320
321 ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
321 ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
322 ('http://test.com/{md5email}', 'testąć@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),
322 ('http://test.com/{md5email}', 'testąć@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),
323
323
324 ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
324 ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
325 ('http://testX.com/{md5email}?s={size}', 'testąć@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),
325 ('http://testX.com/{md5email}?s={size}', 'testąć@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),
326
326
327 ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
327 ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
328 ('{scheme}://{netloc}/{md5email}/{size}', 'testąć@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),
328 ('{scheme}://{netloc}/{md5email}/{size}', 'testąć@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),
329
329
330 ('http://test.com/{email}', 'testąć@foo.com', 'http://test.com/testąć@foo.com'),
330 ('http://test.com/{email}', 'testąć@foo.com', 'http://test.com/testąć@foo.com'),
331 ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
331 ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
332 ('http://test.com/{email}?size={size}', 'testąć@foo.com', 'http://test.com/testąć@foo.com?size=24'),
332 ('http://test.com/{email}?size={size}', 'testąć@foo.com', 'http://test.com/testąć@foo.com?size=24'),
333 ])
333 ])
334 def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
334 def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
335 from rhodecode.lib.helpers import gravatar_url
335 from rhodecode.lib.helpers import gravatar_url
336
336
337 def fake_tmpl_context(_url):
337 def fake_tmpl_context(_url):
338 _c = AttributeDict()
338 _c = AttributeDict()
339 _c.visual = AttributeDict()
339 _c.visual = AttributeDict()
340 _c.visual.use_gravatar = True
340 _c.visual.use_gravatar = True
341 _c.visual.gravatar_url = _url
341 _c.visual.gravatar_url = _url
342 return _c
342 return _c
343
343
344 # mock pyramid.threadlocals
344 # mock pyramid.threadlocals
345 def fake_get_current_request():
345 def fake_get_current_request():
346 request_stub.scheme = 'https'
346 request_stub.scheme = 'https'
347 request_stub.host = 'server.com'
347 request_stub.host = 'server.com'
348
348
349 request_stub._call_context = fake_tmpl_context(tmpl_url)
349 request_stub._call_context = fake_tmpl_context(tmpl_url)
350 return request_stub
350 return request_stub
351
351
352 with mock.patch('rhodecode.lib.helpers.get_current_request',
352 with mock.patch('rhodecode.lib.helpers.get_current_request',
353 fake_get_current_request):
353 fake_get_current_request):
354
354
355 grav = gravatar_url(email_address=email, size=24)
355 grav = gravatar_url(email_address=email, size=24)
356 assert grav == expected
356 assert grav == expected
357
357
358
358
359 @pytest.mark.parametrize(
359 @pytest.mark.parametrize(
360 "email, first_name, last_name, expected_initials, expected_color", [
360 "email, first_name, last_name, expected_initials, expected_color", [
361
361
362 ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
362 ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
363 ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
363 ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
364 # special cases of email
364 # special cases of email
365 ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
365 ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
366 ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
366 ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
367 ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),
367 ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),
368
368
369 ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
369 ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
370 ('pclouds@rhodecode.com', 'Nguyễn Thái', 'Tgọc Duy', 'ND', '#665200'),
370 ('pclouds@rhodecode.com', 'Nguyễn Thái', 'Tgọc Duy', 'ND', '#665200'),
371
371
372 ('john-brown@foo.com', '', '', 'JF', '#73006b'),
372 ('john-brown@foo.com', '', '', 'JF', '#73006b'),
373 ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
373 ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
374 # partials
374 # partials
375 ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'), # fn+email
375 ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'), # fn+email
376 ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'), # em+ln
376 ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'), # em+ln
377 # non-ascii
377 # non-ascii
378 ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
378 ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
379 ('admin@rhodecode.com', 'Łukasz', 'Śuzminski', 'LS', '#104036'),
379 ('admin@rhodecode.com', 'Łukasz', 'Śuzminski', 'LS', '#104036'),
380 ('admin@rhodecode.com', 'Fabian', 'Łukaszewski', 'FL', '#104036'),
380 ('admin@rhodecode.com', 'Fabian', 'Łukaszewski', 'FL', '#104036'),
381
381
382 ('marcin.śuzminski@rhodecode.com', '', '', 'MS', '#73000f'),
382 ('marcin.śuzminski@rhodecode.com', '', '', 'MS', '#73000f'),
383
383
384 # special cases, LDAP can provide those...
384 # special cases, LDAP can provide those...
385 ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
385 ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
386 ('marcin.śuzminski', '', '', 'MS', '#402020'),
386 ('marcin.śuzminski', '', '', 'MS', '#402020'),
387 ('null', '', '', 'NL', '#8c4646'),
387 ('null', '', '', 'NL', '#8c4646'),
388 ('some.@abc.com', 'some', '', 'SA', '#664e33')
388 ('some.@abc.com', 'some', '', 'SA', '#664e33')
389 ])
389 ])
390 def test_initials_gravatar_pick_of_initials_and_color_algo(
390 def test_initials_gravatar_pick_of_initials_and_color_algo(
391 email, first_name, last_name, expected_initials, expected_color):
391 email, first_name, last_name, expected_initials, expected_color):
392 instance = InitialsGravatar(email, first_name, last_name)
392 instance = InitialsGravatar(email, first_name, last_name)
393 assert instance.get_initials() == expected_initials
393 assert instance.get_initials() == expected_initials
394 assert instance.str2color(email) == expected_color
394 assert instance.str2color(email) == expected_color
395
395
396
396
397 def test_initials_gravatar_mapping_algo():
397 def test_initials_gravatar_mapping_algo():
398 pos = set()
398 pos = set()
399 instance = InitialsGravatar('', '', '')
399 instance = InitialsGravatar('', '', '')
400 iterations = 0
400 iterations = 0
401
401
402 variations = []
402 variations = []
403 for letter1 in string.ascii_letters:
403 for letter1 in string.ascii_letters:
404 for letter2 in string.ascii_letters[::-1][:10]:
404 for letter2 in string.ascii_letters[::-1][:10]:
405 for letter3 in string.ascii_letters[:10]:
405 for letter3 in string.ascii_letters[:10]:
406 variations.append(
406 variations.append(
407 '%s@rhodecode.com' % (letter1+letter2+letter3))
407 '%s@rhodecode.com' % (letter1+letter2+letter3))
408
408
409 max_variations = 4096
409 max_variations = 4096
410 for email in variations[:max_variations]:
410 for email in variations[:max_variations]:
411 iterations += 1
411 iterations += 1
412 pos.add(
412 pos.add(
413 instance.pick_color_bank_index(email,
413 instance.pick_color_bank_index(email,
414 instance.get_color_bank()))
414 instance.get_color_bank()))
415
415
416 # we assume that we have matched all 256 possible positions
416 # we assume that we have matched all 256 possible positions
417 # within a reasonable number of different email addresses
417 # within a reasonable number of different email addresses
418 assert len(pos) == 256
418 assert len(pos) == 256
419 assert iterations == max_variations
419 assert iterations == max_variations
420
420
421
421
422 @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [
422 @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [
423 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
423 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
424 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'),
424 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'),
425 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'),
425 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'),
426 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'),
426 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'),
427 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'),
427 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'),
428 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'),
428 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'),
429 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'),
429 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'),
430 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
430 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
431 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
431 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
432 ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
432 ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
433 ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'),
433 ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'),
434 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'),
434 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'),
435 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
435 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
436 ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'),
436 ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'),
437 ])
437 ])
438 def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected):
438 def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected):
439 from rhodecode.lib.utils2 import get_clone_url
439 from rhodecode.lib.utils2 import get_clone_url
440
440
441 class RequestStub(object):
441 class RequestStub(object):
442 def request_url(self, name):
442 def request_url(self, name):
443 return 'http://vps1:8000' + prefix
443 return 'http://vps1:8000' + prefix
444
444
445 def route_url(self, name):
445 def route_url(self, name):
446 return self.request_url(name)
446 return self.request_url(name)
447
447
448 clone_url = get_clone_url(
448 clone_url = get_clone_url(
449 request=RequestStub(),
449 request=RequestStub(),
450 uri_tmpl=tmpl,
450 uri_tmpl=tmpl,
451 repo_name=repo_name, repo_id=23, repo_type='hg', **overrides)
451 repo_name=repo_name, repo_id=23, repo_type='hg', **overrides)
452 assert clone_url == expected
452 assert clone_url == expected
453
453
454
454
455 def test_clone_url_svn_ssh_generator():
455 def test_clone_url_svn_ssh_generator():
456 from rhodecode.lib.utils2 import get_clone_url
456 from rhodecode.lib.utils2 import get_clone_url
457
457
458 class RequestStub(object):
458 class RequestStub(object):
459 def request_url(self, name):
459 def request_url(self, name):
460 return 'http://vps1:8000'
460 return 'http://vps1:8000'
461
461
462 def route_url(self, name):
462 def route_url(self, name):
463 return self.request_url(name)
463 return self.request_url(name)
464
464
465 clone_url = get_clone_url(
465 clone_url = get_clone_url(
466 request=RequestStub(),
466 request=RequestStub(),
467 uri_tmpl=Repository.DEFAULT_CLONE_URI_SSH,
467 uri_tmpl=Repository.DEFAULT_CLONE_URI_SSH,
468 repo_name='svn-test', repo_id=23, repo_type='svn', **{'sys_user': 'rcdev'})
468 repo_name='svn-test', repo_id=23, repo_type='svn', **{'sys_user': 'rcdev'})
469 assert clone_url == 'svn+ssh://rcdev@vps1/svn-test'
469 assert clone_url == 'svn+ssh://rcdev@vps1/svn-test'
470
470
471
471
472 idx = 0
472 idx = 0
473
473
474
474
475 def _quick_url(text, tmpl="""<a class="tooltip-hovercard revision-link" href="%s" data-hovercard-alt="Commit: %s" data-hovercard-url="/some-url">%s</a>""", url_=None, commits=''):
475 def _quick_url(text, tmpl="""<a class="tooltip-hovercard revision-link" href="%s" data-hovercard-alt="Commit: %s" data-hovercard-url="/some-url">%s</a>""", url_=None, commits=''):
476 """
476 """
477 Changes `some text url[foo]` => `some text <a href="/">foo</a>`
477 Changes `some text url[foo]` => `some text <a href="/">foo</a>`
478
478
479 :param text:
479 :param text:
480 """
480 """
481 import re
481 import re
482 # quickly change expected url[] into a link
482 # quickly change expected url[] into a link
483 url_pat = re.compile(r'(?:url\[)(.+?)(?:\])')
483 url_pat = re.compile(r'(?:url\[)(.+?)(?:\])')
484 commits = commits or []
484 commits = commits or []
485
485
486 global idx
486 global idx
487 idx = 0
487 idx = 0
488
488
489 def url_func(match_obj):
489 def url_func(match_obj):
490 global idx
490 global idx
491 _url = match_obj.groups()[0]
491 _url = match_obj.groups()[0]
492 if commits:
492 if commits:
493 commit = commits[idx]
493 commit = commits[idx]
494 idx += 1
494 idx += 1
495 return tmpl % (url_ or '/some-url', _url, commit)
495 return tmpl % (url_ or '/some-url', _url, commit)
496 else:
496 else:
497 return tmpl % (url_ or '/some-url', _url)
497 return tmpl % (url_ or '/some-url', _url)
498
498
499 return url_pat.sub(url_func, text)
499 return url_pat.sub(url_func, text)
500
500
501
501
502 @pytest.mark.parametrize("sample, expected, commits", [
502 @pytest.mark.parametrize("sample, expected, commits", [
503 (
503 (
504 "",
504 "",
505 "",
505 "",
506 [""]
506 [""]
507 ),
507 ),
508 (
508 (
509 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
509 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
510 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
510 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
511 [""]
511 [""]
512 ),
512 ),
513 (
513 (
514 "from rev 000000000000",
514 "from rev 000000000000",
515 "from rev url[000000000000]",
515 "from rev url[000000000000]",
516 ["000000000000"]
516 ["000000000000"]
517 ),
517 ),
518
518
519 (
519 (
520 "from rev 000000000000123123 also rev 000000000000",
520 "from rev 000000000000123123 also rev 000000000000",
521 "from rev url[000000000000123123] also rev url[000000000000]",
521 "from rev url[000000000000123123] also rev url[000000000000]",
522 ["000000000000123123", "000000000000"]
522 ["000000000000123123", "000000000000"]
523 ),
523 ),
524 (
524 (
525 "this should-000 00",
525 "this should-000 00",
526 "this should-000 00",
526 "this should-000 00",
527 [""]
527 [""]
528 ),
528 ),
529 (
529 (
530 "longtextffffffffff rev 123123123123",
530 "longtextffffffffff rev 123123123123",
531 "longtextffffffffff rev url[123123123123]",
531 "longtextffffffffff rev url[123123123123]",
532 ["123123123123"]
532 ["123123123123"]
533 ),
533 ),
534 (
534 (
535 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
535 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
536 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
536 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
537 ["ffffffffffffffffffffffffffffffffffffffffffffffffff"]
537 ["ffffffffffffffffffffffffffffffffffffffffffffffffff"]
538 ),
538 ),
539 (
539 (
540 "ffffffffffff some text traalaa",
540 "ffffffffffff some text traalaa",
541 "url[ffffffffffff] some text traalaa",
541 "url[ffffffffffff] some text traalaa",
542 ["ffffffffffff"]
542 ["ffffffffffff"]
543 ),
543 ),
544 (
544 (
545 """Multi line
545 """Multi line
546 123123123123
546 123123123123
547 some text 000000000000
547 some text 000000000000
548 sometimes !
548 sometimes !
549 """,
549 """,
550 """Multi line
550 """Multi line
551 url[123123123123]
551 url[123123123123]
552 some text url[000000000000]
552 some text url[000000000000]
553 sometimes !
553 sometimes !
554 """,
554 """,
555 ["123123123123", "000000000000"]
555 ["123123123123", "000000000000"]
556 )
556 )
557 ], ids=no_newline_id_generator)
557 ], ids=no_newline_id_generator)
558 def test_urlify_commits(sample, expected, commits):
558 def test_urlify_commits(sample, expected, commits):
559 def fake_url(self, *args, **kwargs):
559 def fake_url(self, *args, **kwargs):
560 return '/some-url'
560 return '/some-url'
561
561
562 expected = _quick_url(expected, commits=commits)
562 expected = _quick_url(expected, commits=commits)
563
563
564 with mock.patch('rhodecode.lib.helpers.route_url', fake_url):
564 with mock.patch('rhodecode.lib.helpers.route_url', fake_url):
565 from rhodecode.lib.helpers import urlify_commits
565 from rhodecode.lib.helpers import urlify_commits
566 assert urlify_commits(sample, 'repo_name') == expected
566 assert urlify_commits(sample, 'repo_name') == expected
567
567
568
568
569 @pytest.mark.parametrize("sample, expected, url_", [
569 @pytest.mark.parametrize("sample, expected, url_", [
570 ("",
570 ("",
571 "",
571 "",
572 ""),
572 ""),
573 ("https://svn.apache.org/repos",
573 ("https://svn.apache.org/repos",
574 "url[https://svn.apache.org/repos]",
574 "url[https://svn.apache.org/repos]",
575 "https://svn.apache.org/repos"),
575 "https://svn.apache.org/repos"),
576 ("http://svn.apache.org/repos",
576 ("http://svn.apache.org/repos",
577 "url[http://svn.apache.org/repos]",
577 "url[http://svn.apache.org/repos]",
578 "http://svn.apache.org/repos"),
578 "http://svn.apache.org/repos"),
579 ("from rev a also rev http://google.com",
579 ("from rev a also rev http://google.com",
580 "from rev a also rev url[http://google.com]",
580 "from rev a also rev url[http://google.com]",
581 "http://google.com"),
581 "http://google.com"),
582 ("""Multi line
582 ("""Multi line
583 https://foo.bar.com
583 https://foo.bar.com
584 some text lalala""",
584 some text lalala""",
585 """Multi line
585 """Multi line
586 url[https://foo.bar.com]
586 url[https://foo.bar.com]
587 some text lalala""",
587 some text lalala""",
588 "https://foo.bar.com")
588 "https://foo.bar.com")
589 ], ids=no_newline_id_generator)
589 ], ids=no_newline_id_generator)
590 def test_urlify_test(sample, expected, url_):
590 def test_urlify_test(sample, expected, url_):
591 from rhodecode.lib.helpers import urlify_text
591 from rhodecode.lib.helpers import urlify_text
592 expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
592 expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
593 assert urlify_text(sample) == expected
593 assert urlify_text(sample) == expected
594
594
595
595
596 @pytest.mark.parametrize("test, expected", [
596 @pytest.mark.parametrize("test, expected", [
597 ("", None),
597 ("", None),
598 ("/_2", '2'),
598 ("/_2", '2'),
599 ("_2", '2'),
599 ("_2", '2'),
600 ("/_2/", '2'),
600 ("/_2/", '2'),
601 ("_2/", '2'),
601 ("_2/", '2'),
602
602
603 ("/_21", '21'),
603 ("/_21", '21'),
604 ("_21", '21'),
604 ("_21", '21'),
605 ("/_21/", '21'),
605 ("/_21/", '21'),
606 ("_21/", '21'),
606 ("_21/", '21'),
607
607
608 ("/_21/foobar", '21'),
608 ("/_21/foobar", '21'),
609 ("_21/121", '21'),
609 ("_21/121", '21'),
610 ("/_21/_12", '21'),
610 ("/_21/_12", '21'),
611 ("_21/rc/foo", '21'),
611 ("_21/rc/foo", '21'),
612
612
613 ])
613 ])
614 def test_get_repo_by_id(test, expected):
614 def test_get_repo_by_id(test, expected):
615 from rhodecode.model.repo import RepoModel
615 from rhodecode.model.repo import RepoModel
616 _test = RepoModel()._extract_id_from_repo_name(test)
616 _test = RepoModel()._extract_id_from_repo_name(test)
617 assert _test == expected
617 assert _test == expected
618
618
619
619
620 def test_invalidation_context(baseapp):
620 def test_invalidation_context(baseapp):
621 repo_id = 9999
621 repo_id = 9999
622 calls = [1, 2]
623 call_args = ('some-key',)
624 region = rc_cache.get_or_create_region('cache_repo_longterm')
622
625
623 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
626 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
624 repo_id, CacheKey.CACHE_TYPE_FEED)
627 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
625 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
626 repo_id=repo_id)
627 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
628
628
629 calls = [1, 2]
629 def cache_generator(_state_uid):
630
630
631 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
631 @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
632 def _dummy_func(cache_key):
632 def _dummy_func(*args):
633 val = calls.pop(0)
633 val = calls.pop(0)
634 return 'result:{}'.format(val)
634 return _state_uid, f'result:{val}'
635
635
636 inv_context_manager = rc_cache.InvalidationContext(
636 return _dummy_func
637 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
638
637
639 # 1st call, fresh caches
638 # 1st call, fresh caches
640 with inv_context_manager as invalidation_context:
639 with inv_context_manager as invalidation_context:
641 should_invalidate = invalidation_context.should_invalidate()
640 cache_state_uid = invalidation_context.state_uid
641 cache_func = cache_generator(cache_state_uid)
642 previous_state_uid, result = cache_func(*call_args)
643
644 should_invalidate = previous_state_uid != cache_state_uid
642 if should_invalidate:
645 if should_invalidate:
643 result = _dummy_func.refresh('some-key')
646 _, result = cache_func.refresh(*call_args)
644 else:
645 result = _dummy_func('some-key')
646
647
647 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
648 assert should_invalidate is False # 1st call, we don't need to invalidate
648 assert should_invalidate is True
649
649
650 assert 'result:1' == result
650 assert 'result:1' == result
651 # should be cached so calling it twice will give the same result !
651 # should be already cached so calling it twice will give the same result!
652 result = _dummy_func('some-key')
652 _, result = cache_func(*call_args)
653 assert 'result:1' == result
653 assert 'result:1' == result
654
654
655 # 2nd call, we create a new context manager, this should now be key aware, and
655 # 2nd call, we create a new context manager, this should now be key aware, and
656 # return an active cache region
656 # return an active cache region from DB based on the same uid
657 with inv_context_manager as invalidation_context:
657 with inv_context_manager as invalidation_context:
658 should_invalidate = invalidation_context.should_invalidate()
658 cache_state_uid = invalidation_context.state_uid
659 assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
659 cache_func = cache_generator(cache_state_uid)
660 assert should_invalidate is False
660 previous_state_uid, result = cache_func(*call_args)
661
662 should_invalidate = previous_state_uid != cache_state_uid
663 if should_invalidate:
664 _, result = cache_func.refresh(*call_args)
665
666 assert should_invalidate is False # 2nd call, state unchanged, we don't need to invalidate
661
667
662 # Mark invalidation
668 # Mark invalidation
663 CacheKey.set_invalidate(invalidation_namespace)
669 CacheKey.set_invalidate(repo_namespace_key)
664
670
665 # 3rd call, fresh caches
671 # 3rd call, fresh caches
666 with inv_context_manager as invalidation_context:
672 with inv_context_manager as invalidation_context:
667 should_invalidate = invalidation_context.should_invalidate()
673 cache_state_uid = invalidation_context.state_uid
674 cache_func = cache_generator(cache_state_uid)
675 previous_state_uid, result = cache_func(*call_args)
676
677 should_invalidate = previous_state_uid != cache_state_uid
668 if should_invalidate:
678 if should_invalidate:
669 result = _dummy_func.refresh('some-key')
679 _, result = cache_func.refresh(*call_args)
670 else:
671 result = _dummy_func('some-key')
672
680
673 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
674 assert should_invalidate is True
681 assert should_invalidate is True
675
682
676 assert 'result:2' == result
683 assert 'result:2' == result
677
684
678 # cached again, same result
685 # cached again, same result
679 result = _dummy_func('some-key')
686 _, result = cache_func(*call_args)
680 assert 'result:2' == result
687 assert 'result:2' == result
681
688
682
689
683 def test_invalidation_context_exception_in_compute(baseapp):
690 def test_invalidation_context_exception_in_compute(baseapp):
684 repo_id = 888
691 repo_id = 888
692 region = rc_cache.get_or_create_region('cache_repo_longterm')
685
693
686 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
694 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
687 repo_id, CacheKey.CACHE_TYPE_FEED)
695 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
688 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
689 repo_id=repo_id)
690 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
691
696
692 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
697 def cache_generator(_state_uid):
693 def _dummy_func(cache_key):
698 @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
694 raise Exception('Error in cache func')
699 def _dummy_func(*args):
700 raise Exception('Error in cache func')
701
702 return _dummy_func
695
703
696 with pytest.raises(Exception):
704 with pytest.raises(Exception):
697 inv_context_manager = rc_cache.InvalidationContext(
698 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
699
705
700 # 1st call, fresh caches
706 # 1st call, fresh caches
701 with inv_context_manager as invalidation_context:
707 with inv_context_manager as invalidation_context:
702 should_invalidate = invalidation_context.should_invalidate()
708 cache_state_uid = invalidation_context.state_uid
703 if should_invalidate:
709 cache_func = cache_generator(cache_state_uid)
704 _dummy_func.refresh('some-key-2')
710 cache_func(1, 2, 3)
705 else:
706 _dummy_func('some-key-2')
707
711
708
712
709 @pytest.mark.parametrize('execution_number', range(5))
713 @pytest.mark.parametrize('execution_number', range(5))
710 def test_cache_invalidation_race_condition(execution_number, baseapp):
714 def test_cache_invalidation_race_condition(execution_number, baseapp):
711 import time
712
715
713 repo_id = 777
716 repo_id = 777
714
717
715 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
718 region = rc_cache.get_or_create_region('cache_repo_longterm')
716 repo_id, CacheKey.CACHE_TYPE_FEED)
719 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
717 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
718 repo_id=repo_id)
719 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
720
720
721 @run_test_concurrently(25)
721 @run_test_concurrently(25)
722 def test_create_and_delete_cache_keys():
722 def test_create_and_delete_cache_keys():
723 time.sleep(0.2)
723 time.sleep(0.2)
724
724
725 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
725 def cache_generator(_state_uid):
726 def _dummy_func(cache_key):
727 val = 'async'
728 return 'result:{}'.format(val)
729
726
730 inv_context_manager = rc_cache.InvalidationContext(
727 @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
731 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
728 def _dummy_func(*args):
729 return _state_uid, 'result:async'
730
731 return _dummy_func
732
733 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
732
734
733 # 1st call, fresh caches
735 # 1st call, fresh caches
734 with inv_context_manager as invalidation_context:
736 with inv_context_manager as invalidation_context:
735 should_invalidate = invalidation_context.should_invalidate()
737 cache_state_uid = invalidation_context.state_uid
738 cache_func = cache_generator(cache_state_uid)
739 previous_state_uid, result = cache_func('doo')
740
741 should_invalidate = previous_state_uid != cache_state_uid
736 if should_invalidate:
742 if should_invalidate:
737 _dummy_func.refresh('some-key-3')
743 _, result = cache_func.refresh('doo')
738 else:
739 _dummy_func('some-key-3')
740
744
741 # Mark invalidation
745 # Mark invalidation
742 CacheKey.set_invalidate(invalidation_namespace)
746 CacheKey.set_invalidate(repo_namespace_key)
743
747
744 test_create_and_delete_cache_keys()
748 test_create_and_delete_cache_keys()
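
Taken together, the refactored tests above all follow the same consumer pattern for the new key-based InvalidationContext. Below is a minimal sketch of that pattern; the repo id, the namespace string and the load_data() helper are illustrative placeholders, while the rc_cache and CacheKey calls mirror the ones visible in the diff.

from rhodecode.lib import rc_cache
from rhodecode.model.db import CacheKey

repo_id = 9999  # illustrative repository id
region = rc_cache.get_or_create_region('cache_repo_longterm')
repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)


def load_data(*args):
    # placeholder for the real, expensive computation
    return f'result:{args}'


def cache_generator(_state_uid):
    # bind the current invalidation state uid into the cached function so every
    # stored entry remembers which state produced it
    @region.conditional_cache_on_arguments(namespace=f'some-common-namespace-{repo_id}')
    def _compute(*args):
        return _state_uid, load_data(*args)
    return _compute


with rc_cache.InvalidationContext(key=repo_namespace_key) as invalidation_context:
    cache_state_uid = invalidation_context.state_uid
    cache_func = cache_generator(cache_state_uid)
    previous_state_uid, result = cache_func('some-key')

    # an entry produced under an older state means the namespace was invalidated
    # in the meantime (e.g. via CacheKey.set_invalidate(repo_namespace_key)),
    # so the value is recomputed explicitly
    if previous_state_uid != cache_state_uid:
        _, result = cache_func.refresh('some-key')

Writers only need to call CacheKey.set_invalidate(repo_namespace_key); the next reader then sees a state uid mismatch and refreshes the entry, which is exactly what the assertions in the tests above verify.
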
@@ -1,375 +1,373 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Test suite for push/pull operations on specially modified INI files
21 Test suite for push/pull operations on specially modified INI files
22
22
23 .. important::
23 .. important::
24
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
28
29
29
30 import time
30 import time
31 import logging
31 import logging
32
32
33 import pytest
33 import pytest
34
34
35 from rhodecode.lib import rc_cache
35 from rhodecode.lib import rc_cache
36 from rhodecode.model.auth_token import AuthTokenModel
36 from rhodecode.model.auth_token import AuthTokenModel
37 from rhodecode.model.db import Repository, UserIpMap, CacheKey
37 from rhodecode.model.db import Repository, UserIpMap, CacheKey
38 from rhodecode.model.meta import Session
38 from rhodecode.model.meta import Session
39 from rhodecode.model.repo import RepoModel
39 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.user import UserModel
40 from rhodecode.model.user import UserModel
41 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
41 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
42 from rhodecode.tests.utils import assert_message_in_log
42 from rhodecode.tests.utils import assert_message_in_log
43
43
44 from rhodecode.tests.vcs_operations import (
44 from rhodecode.tests.vcs_operations import (
45 Command, _check_proper_clone, _check_proper_git_push,
45 Command, _check_proper_clone, _check_proper_git_push,
46 _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
46 _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
47
47
48
48
49 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
49 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
50 class TestVCSOperations(object):
50 class TestVCSOperations(object):
51
51
52 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
52 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(HG_REPO)
53 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 stdout, stderr = Command('/tmp').execute(
54 stdout, stderr = Command('/tmp').execute(
55 'hg clone', clone_url, tmpdir.strpath)
55 'hg clone', clone_url, tmpdir.strpath)
56 _check_proper_clone(stdout, stderr, 'hg')
56 _check_proper_clone(stdout, stderr, 'hg')
57
57
58 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
58 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
59 clone_url = rc_web_server.repo_clone_url(HG_REPO)
59 clone_url = rc_web_server.repo_clone_url(HG_REPO)
60 stdout, stderr = Command('/tmp').execute(
60 stdout, stderr = Command('/tmp').execute(
61 'hg clone --pull', clone_url, tmpdir.strpath)
61 'hg clone --pull', clone_url, tmpdir.strpath)
62 _check_proper_clone(stdout, stderr, 'hg')
62 _check_proper_clone(stdout, stderr, 'hg')
63
63
64 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
64 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
65 clone_url = rc_web_server.repo_clone_url(HG_REPO)
65 clone_url = rc_web_server.repo_clone_url(HG_REPO)
66 stdout, stderr = Command('/tmp').execute(
66 stdout, stderr = Command('/tmp').execute(
67 'hg clone --pull --stream', clone_url, tmpdir.strpath)
67 'hg clone --pull --stream', clone_url, tmpdir.strpath)
68 assert 'files to transfer,' in stdout
68 assert 'files to transfer,' in stdout
69 assert 'transferred 1.' in stdout
69 assert 'transferred 1.' in stdout
70 assert '114 files updated,' in stdout
70 assert '114 files updated,' in stdout
71
71
72 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
72 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
73 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
73 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
74 cmd = Command('/tmp')
74 cmd = Command('/tmp')
75 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
75 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
76 _check_proper_clone(stdout, stderr, 'git')
76 _check_proper_clone(stdout, stderr, 'git')
77 cmd.assert_returncode_success()
77 cmd.assert_returncode_success()
78
78
79 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
79 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
80 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
80 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
81 cmd = Command('/tmp')
81 cmd = Command('/tmp')
82 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
82 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
83 _check_proper_clone(stdout, stderr, 'git')
83 _check_proper_clone(stdout, stderr, 'git')
84 cmd.assert_returncode_success()
84 cmd.assert_returncode_success()
85
85
86 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
86 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
87 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
87 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
88 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
88 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
89 stdout, stderr = Command('/tmp').execute(
89 stdout, stderr = Command('/tmp').execute(
90 'hg clone', clone_url, tmpdir.strpath)
90 'hg clone', clone_url, tmpdir.strpath)
91 _check_proper_clone(stdout, stderr, 'hg')
91 _check_proper_clone(stdout, stderr, 'hg')
92
92
93 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
93 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
94 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
94 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
95 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
95 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
96 cmd = Command('/tmp')
96 cmd = Command('/tmp')
97 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
97 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
98 _check_proper_clone(stdout, stderr, 'git')
98 _check_proper_clone(stdout, stderr, 'git')
99 cmd.assert_returncode_success()
99 cmd.assert_returncode_success()
100
100
101 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
101 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
102 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
102 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
103 stdout, stderr = Command('/tmp').execute(
103 stdout, stderr = Command('/tmp').execute(
104 'hg clone', clone_url, tmpdir.strpath)
104 'hg clone', clone_url, tmpdir.strpath)
105 _check_proper_clone(stdout, stderr, 'hg')
105 _check_proper_clone(stdout, stderr, 'hg')
106
106
107 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
107 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
108 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
108 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
109 cmd = Command('/tmp')
109 cmd = Command('/tmp')
110 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
110 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
111 _check_proper_clone(stdout, stderr, 'git')
111 _check_proper_clone(stdout, stderr, 'git')
112 cmd.assert_returncode_success()
112 cmd.assert_returncode_success()
113
113
114 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
114 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
115 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
115 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
116 cmd = Command('/tmp')
116 cmd = Command('/tmp')
117 stdout, stderr = cmd.execute(
117 stdout, stderr = cmd.execute(
118 'git clone --depth=1', clone_url, tmpdir.strpath)
118 'git clone --depth=1', clone_url, tmpdir.strpath)
119
119
120 assert '' == stdout
120 assert '' == stdout
121 assert 'Cloning into' in stderr
121 assert 'Cloning into' in stderr
122 cmd.assert_returncode_success()
122 cmd.assert_returncode_success()
123
123
124 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
124 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
125 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
125 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
126 stdout, stderr = Command('/tmp').execute(
126 stdout, stderr = Command('/tmp').execute(
127 'hg clone', clone_url, tmpdir.strpath)
127 'hg clone', clone_url, tmpdir.strpath)
128 assert 'abort: authorization failed' in stderr
128 assert 'abort: authorization failed' in stderr
129
129
130 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
130 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
131 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
131 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
132 stdout, stderr = Command('/tmp').execute(
132 stdout, stderr = Command('/tmp').execute(
133 'git clone', clone_url, tmpdir.strpath)
133 'git clone', clone_url, tmpdir.strpath)
134 assert 'fatal: Authentication failed' in stderr
134 assert 'fatal: Authentication failed' in stderr
135
135
136 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
136 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
137 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
137 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
138 stdout, stderr = Command('/tmp').execute(
138 stdout, stderr = Command('/tmp').execute(
139 'hg clone', clone_url, tmpdir.strpath)
139 'hg clone', clone_url, tmpdir.strpath)
140 assert 'HTTP Error 404: Not Found' in stderr
140 assert 'HTTP Error 404: Not Found' in stderr
141
141
142 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
142 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
143 clone_url = rc_web_server.repo_clone_url(HG_REPO)
143 clone_url = rc_web_server.repo_clone_url(HG_REPO)
144 stdout, stderr = Command('/tmp').execute(
144 stdout, stderr = Command('/tmp').execute(
145 'git clone', clone_url, tmpdir.strpath)
145 'git clone', clone_url, tmpdir.strpath)
146 assert 'not found' in stderr
146 assert 'not found' in stderr
147
147
148 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
148 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
149 clone_url = rc_web_server.repo_clone_url('trololo')
149 clone_url = rc_web_server.repo_clone_url('trololo')
150 stdout, stderr = Command('/tmp').execute(
150 stdout, stderr = Command('/tmp').execute(
151 'hg clone', clone_url, tmpdir.strpath)
151 'hg clone', clone_url, tmpdir.strpath)
152 assert 'HTTP Error 404: Not Found' in stderr
152 assert 'HTTP Error 404: Not Found' in stderr
153
153
154 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
154 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
155 clone_url = rc_web_server.repo_clone_url('trololo')
155 clone_url = rc_web_server.repo_clone_url('trololo')
156 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
156 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
157 assert 'not found' in stderr
157 assert 'not found' in stderr
158
158
159 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
159 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
160 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
160 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
161 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
161 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
162 assert 'HTTP Error 404: Not Found' in stderr
162 assert 'HTTP Error 404: Not Found' in stderr
163
163
164 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
164 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
165 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
165 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
166 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
166 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
167 assert 'not found' in stderr
167 assert 'not found' in stderr
168
168
169 def test_clone_existing_path_hg_not_in_database(
169 def test_clone_existing_path_hg_not_in_database(
170 self, rc_web_server, tmpdir, fs_repo_only):
170 self, rc_web_server, tmpdir, fs_repo_only):
171
171
172 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
172 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
173 clone_url = rc_web_server.repo_clone_url(db_name)
173 clone_url = rc_web_server.repo_clone_url(db_name)
174 stdout, stderr = Command('/tmp').execute(
174 stdout, stderr = Command('/tmp').execute(
175 'hg clone', clone_url, tmpdir.strpath)
175 'hg clone', clone_url, tmpdir.strpath)
176 assert 'HTTP Error 404: Not Found' in stderr
176 assert 'HTTP Error 404: Not Found' in stderr
177
177
178 def test_clone_existing_path_git_not_in_database(
178 def test_clone_existing_path_git_not_in_database(
179 self, rc_web_server, tmpdir, fs_repo_only):
179 self, rc_web_server, tmpdir, fs_repo_only):
180 db_name = fs_repo_only('not-in-db-git', repo_type='git')
180 db_name = fs_repo_only('not-in-db-git', repo_type='git')
181 clone_url = rc_web_server.repo_clone_url(db_name)
181 clone_url = rc_web_server.repo_clone_url(db_name)
182 stdout, stderr = Command('/tmp').execute(
182 stdout, stderr = Command('/tmp').execute(
183 'git clone', clone_url, tmpdir.strpath)
183 'git clone', clone_url, tmpdir.strpath)
184 assert 'not found' in stderr
184 assert 'not found' in stderr
185
185
186 def test_clone_existing_path_hg_not_in_database_different_scm(
186 def test_clone_existing_path_hg_not_in_database_different_scm(
187 self, rc_web_server, tmpdir, fs_repo_only):
187 self, rc_web_server, tmpdir, fs_repo_only):
188 db_name = fs_repo_only('not-in-db-git', repo_type='git')
188 db_name = fs_repo_only('not-in-db-git', repo_type='git')
189 clone_url = rc_web_server.repo_clone_url(db_name)
189 clone_url = rc_web_server.repo_clone_url(db_name)
190 stdout, stderr = Command('/tmp').execute(
190 stdout, stderr = Command('/tmp').execute(
191 'hg clone', clone_url, tmpdir.strpath)
191 'hg clone', clone_url, tmpdir.strpath)
192 assert 'HTTP Error 404: Not Found' in stderr
192 assert 'HTTP Error 404: Not Found' in stderr
193
193
194 def test_clone_existing_path_git_not_in_database_different_scm(
194 def test_clone_existing_path_git_not_in_database_different_scm(
195 self, rc_web_server, tmpdir, fs_repo_only):
195 self, rc_web_server, tmpdir, fs_repo_only):
196 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
196 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
197 clone_url = rc_web_server.repo_clone_url(db_name)
197 clone_url = rc_web_server.repo_clone_url(db_name)
198 stdout, stderr = Command('/tmp').execute(
198 stdout, stderr = Command('/tmp').execute(
199 'git clone', clone_url, tmpdir.strpath)
199 'git clone', clone_url, tmpdir.strpath)
200 assert 'not found' in stderr
200 assert 'not found' in stderr
201
201
202 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
202 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
203 repo = user_util.create_repo()
203 repo = user_util.create_repo()
204 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
204 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
205
205
206 # Damage repo by removing its folder
206 # Damage repo by removing its folder
207 RepoModel()._delete_filesystem_repo(repo)
207 RepoModel()._delete_filesystem_repo(repo)
208
208
209 stdout, stderr = Command('/tmp').execute(
209 stdout, stderr = Command('/tmp').execute(
210 'hg clone', clone_url, tmpdir.strpath)
210 'hg clone', clone_url, tmpdir.strpath)
211 assert 'HTTP Error 404: Not Found' in stderr
211 assert 'HTTP Error 404: Not Found' in stderr
212
212
213 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
213 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
214 repo = user_util.create_repo(repo_type='git')
214 repo = user_util.create_repo(repo_type='git')
215 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
215 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
216
216
217 # Damage repo by removing its folder
217 # Damage repo by removing its folder
218 RepoModel()._delete_filesystem_repo(repo)
218 RepoModel()._delete_filesystem_repo(repo)
219
219
220 stdout, stderr = Command('/tmp').execute(
220 stdout, stderr = Command('/tmp').execute(
221 'git clone', clone_url, tmpdir.strpath)
221 'git clone', clone_url, tmpdir.strpath)
222 assert 'not found' in stderr
222 assert 'not found' in stderr
223
223
224 def test_push_new_file_hg(self, rc_web_server, tmpdir):
224 def test_push_new_file_hg(self, rc_web_server, tmpdir):
225 clone_url = rc_web_server.repo_clone_url(HG_REPO)
225 clone_url = rc_web_server.repo_clone_url(HG_REPO)
226 stdout, stderr = Command('/tmp').execute(
226 stdout, stderr = Command('/tmp').execute(
227 'hg clone', clone_url, tmpdir.strpath)
227 'hg clone', clone_url, tmpdir.strpath)
228
228
229 stdout, stderr = _add_files_and_push(
229 stdout, stderr = _add_files_and_push(
230 'hg', tmpdir.strpath, clone_url=clone_url)
230 'hg', tmpdir.strpath, clone_url=clone_url)
231
231
232 assert 'pushing to' in stdout
232 assert 'pushing to' in stdout
233 assert 'size summary' in stdout
233 assert 'size summary' in stdout
234
234
235 def test_push_new_file_git(self, rc_web_server, tmpdir):
235 def test_push_new_file_git(self, rc_web_server, tmpdir):
236 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
236 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
237 stdout, stderr = Command('/tmp').execute(
237 stdout, stderr = Command('/tmp').execute(
238 'git clone', clone_url, tmpdir.strpath)
238 'git clone', clone_url, tmpdir.strpath)
239
239
240 # commit some stuff into this repo
240 # commit some stuff into this repo
241 stdout, stderr = _add_files_and_push(
241 stdout, stderr = _add_files_and_push(
242 'git', tmpdir.strpath, clone_url=clone_url)
242 'git', tmpdir.strpath, clone_url=clone_url)
243
243
244 _check_proper_git_push(stdout, stderr)
244 _check_proper_git_push(stdout, stderr)
245
245
246 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
246 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
247 hg_repo = Repository.get_by_repo_name(HG_REPO)
247 hg_repo = Repository.get_by_repo_name(HG_REPO)
248
248
249 # init cache objects
249 # init cache objects
250 CacheKey.delete_all_cache()
250 CacheKey.delete_all_cache()
251 cache_namespace_uid = 'cache_push_test.{}'.format(hg_repo.repo_id)
251 cache_namespace_uid = 'cache_push_test.{}'.format(hg_repo.repo_id)
252 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
252 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)
253 repo_id=hg_repo.repo_id)
254
253
255 inv_context_manager = rc_cache.InvalidationContext(
254 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
256 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
257
255
258 with inv_context_manager as invalidation_context:
256 with inv_context_manager as invalidation_context:
259 # __enter__ will create and register cache objects
257 # __enter__ will create and register cache objects
260 pass
258 pass
261
259
262 # clone to init cache
260 # clone to init cache
263 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
261 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
264 stdout, stderr = Command('/tmp').execute(
262 stdout, stderr = Command('/tmp').execute(
265 'hg clone', clone_url, tmpdir.strpath)
263 'hg clone', clone_url, tmpdir.strpath)
266
264
267 cache_keys = hg_repo.cache_keys
265 cache_keys = hg_repo.cache_keys
268 assert cache_keys != []
266 assert cache_keys != []
269 for key in cache_keys:
267 for key in cache_keys:
270 assert key.cache_active is True
268 assert key.cache_active is True
271
269
272 # PUSH that should trigger cache invalidation
270 # PUSH that should trigger cache invalidation
273 stdout, stderr = _add_files_and_push(
271 stdout, stderr = _add_files_and_push(
274 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
272 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
275
273
276 # flush...
274 # flush...
277 Session().commit()
275 Session().commit()
278 hg_repo = Repository.get_by_repo_name(HG_REPO)
276 hg_repo = Repository.get_by_repo_name(HG_REPO)
279 cache_keys = hg_repo.cache_keys
277 cache_keys = hg_repo.cache_keys
280 assert cache_keys != []
278 assert cache_keys != []
281 for key in cache_keys:
279 for key in cache_keys:
282 # keys should be marked as not active
280 # keys should be marked as not active
283 assert key.cache_active is False
281 assert key.cache_active is False
284
282
285 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
283 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
286 clone_url = rc_web_server.repo_clone_url(HG_REPO)
284 clone_url = rc_web_server.repo_clone_url(HG_REPO)
287 stdout, stderr = Command('/tmp').execute(
285 stdout, stderr = Command('/tmp').execute(
288 'hg clone', clone_url, tmpdir.strpath)
286 'hg clone', clone_url, tmpdir.strpath)
289
287
290 push_url = rc_web_server.repo_clone_url(
288 push_url = rc_web_server.repo_clone_url(
291 HG_REPO, user='bad', passwd='name')
289 HG_REPO, user='bad', passwd='name')
292 stdout, stderr = _add_files_and_push(
290 stdout, stderr = _add_files_and_push(
293 'hg', tmpdir.strpath, clone_url=push_url)
291 'hg', tmpdir.strpath, clone_url=push_url)
294
292
295 assert 'abort: authorization failed' in stderr
293 assert 'abort: authorization failed' in stderr
296
294
297 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
295 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
298 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
296 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
299 stdout, stderr = Command('/tmp').execute(
297 stdout, stderr = Command('/tmp').execute(
300 'git clone', clone_url, tmpdir.strpath)
298 'git clone', clone_url, tmpdir.strpath)
301
299
302 push_url = rc_web_server.repo_clone_url(
300 push_url = rc_web_server.repo_clone_url(
303 GIT_REPO, user='bad', passwd='name')
301 GIT_REPO, user='bad', passwd='name')
304 stdout, stderr = _add_files_and_push(
302 stdout, stderr = _add_files_and_push(
305 'git', tmpdir.strpath, clone_url=push_url)
303 'git', tmpdir.strpath, clone_url=push_url)
306
304
307 assert 'fatal: Authentication failed' in stderr
305 assert 'fatal: Authentication failed' in stderr
308
306
309 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
307 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
310 clone_url = rc_web_server.repo_clone_url(HG_REPO)
308 clone_url = rc_web_server.repo_clone_url(HG_REPO)
311 stdout, stderr = Command('/tmp').execute(
309 stdout, stderr = Command('/tmp').execute(
312 'hg clone', clone_url, tmpdir.strpath)
310 'hg clone', clone_url, tmpdir.strpath)
313
311
314 stdout, stderr = _add_files_and_push(
312 stdout, stderr = _add_files_and_push(
315 'hg', tmpdir.strpath,
313 'hg', tmpdir.strpath,
316 clone_url=rc_web_server.repo_clone_url('not-existing'))
314 clone_url=rc_web_server.repo_clone_url('not-existing'))
317
315
318 assert 'HTTP Error 404: Not Found' in stderr
316 assert 'HTTP Error 404: Not Found' in stderr
319
317
320 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
318 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
321 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
319 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
322 stdout, stderr = Command('/tmp').execute(
320 stdout, stderr = Command('/tmp').execute(
323 'git clone', clone_url, tmpdir.strpath)
321 'git clone', clone_url, tmpdir.strpath)
324
322
325 stdout, stderr = _add_files_and_push(
323 stdout, stderr = _add_files_and_push(
326 'git', tmpdir.strpath,
324 'git', tmpdir.strpath,
327 clone_url=rc_web_server.repo_clone_url('not-existing'))
325 clone_url=rc_web_server.repo_clone_url('not-existing'))
328
326
329 assert 'not found' in stderr
327 assert 'not found' in stderr
330
328
331 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
329 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
332 user_model = UserModel()
330 user_model = UserModel()
333 try:
331 try:
334 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
332 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
335 Session().commit()
333 Session().commit()
336 time.sleep(2)
334 time.sleep(2)
337 clone_url = rc_web_server.repo_clone_url(HG_REPO)
335 clone_url = rc_web_server.repo_clone_url(HG_REPO)
338 stdout, stderr = Command('/tmp').execute(
336 stdout, stderr = Command('/tmp').execute(
339 'hg clone', clone_url, tmpdir.strpath)
337 'hg clone', clone_url, tmpdir.strpath)
340 assert 'abort: HTTP Error 403: Forbidden' in stderr
338 assert 'abort: HTTP Error 403: Forbidden' in stderr
341 finally:
339 finally:
342 # release IP restrictions
340 # release IP restrictions
343 for ip in UserIpMap.getAll():
341 for ip in UserIpMap.getAll():
344 UserIpMap.delete(ip.ip_id)
342 UserIpMap.delete(ip.ip_id)
345 Session().commit()
343 Session().commit()
346
344
347 time.sleep(2)
345 time.sleep(2)
348
346
349 stdout, stderr = Command('/tmp').execute(
347 stdout, stderr = Command('/tmp').execute(
350 'hg clone', clone_url, tmpdir.strpath)
348 'hg clone', clone_url, tmpdir.strpath)
351 _check_proper_clone(stdout, stderr, 'hg')
349 _check_proper_clone(stdout, stderr, 'hg')
352
350
353 def test_ip_restriction_git(self, rc_web_server, tmpdir):
351 def test_ip_restriction_git(self, rc_web_server, tmpdir):
354 user_model = UserModel()
352 user_model = UserModel()
355 try:
353 try:
356 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
354 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
357 Session().commit()
355 Session().commit()
358 time.sleep(2)
356 time.sleep(2)
359 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
357 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
360 stdout, stderr = Command('/tmp').execute(
358 stdout, stderr = Command('/tmp').execute(
361 'git clone', clone_url, tmpdir.strpath)
359 'git clone', clone_url, tmpdir.strpath)
362 msg = "The requested URL returned error: 403"
360 msg = "The requested URL returned error: 403"
363 assert msg in stderr
361 assert msg in stderr
364 finally:
362 finally:
365 # release IP restrictions
363 # release IP restrictions
366 for ip in UserIpMap.getAll():
364 for ip in UserIpMap.getAll():
367 UserIpMap.delete(ip.ip_id)
365 UserIpMap.delete(ip.ip_id)
368 Session().commit()
366 Session().commit()
369
367
370 time.sleep(2)
368 time.sleep(2)
371
369
372 cmd = Command('/tmp')
370 cmd = Command('/tmp')
373 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
371 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
374 cmd.assert_returncode_success()
372 cmd.assert_returncode_success()
375 _check_proper_clone(stdout, stderr, 'git')
373 _check_proper_clone(stdout, stderr, 'git')
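
As a usage note, test_push_invalidates_cache above exercises the writer side of the same mechanism. A condensed sketch under the same assumptions (the repo id is illustrative; the calls follow the test code):

from rhodecode.lib import rc_cache
from rhodecode.model.db import CacheKey
from rhodecode.model.meta import Session

repo_id = 1  # illustrative repository id
repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)

# entering the context creates and registers the CacheKey rows for this namespace
with rc_cache.InvalidationContext(key=repo_namespace_key):
    pass

# a writer (the push handler in the test above) marks the namespace as invalidated;
# the test then observes cache_active=False on the repo's cache keys until a
# reader refreshes them
CacheKey.set_invalidate(repo_namespace_key)
Session().commit()
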